Mirror of https://github.com/MarlinFirmware/Marlin.git (synced 2025-12-28 02:10:32 -07:00)

🧑‍💻 Update example build scripts

This commit is contained in:
parent c7e10c5622
commit f0b960e2ef

22 changed files with 201 additions and 47909 deletions
@@ -2,21 +2,22 @@
#
# Usage:
#
# build_all_examples [-b|--branch=<branch>] - Branch to fetch from Configurations repo (import-2.1.x)
# build_all_examples [-a|--archive] - Copy the binary to the export location
# [-B|--base] - Base path of configurations, overriding -b
# [-b|--branch=<branch>] - Branch to fetch from Configurations repo (import-2.1.x)
# [-c|--continue] - Continue the paused build
# [-p|--purge] - Purge the status file and start over
# [-s|--skip] - Continue the paused build, skipping one
# [-r|--resume=<path>] - Start at some config in the filesystem order
# [-l|--limit=#] - Limit the number of builds in this run
# [-d|--debug] - Print extra debug output (after)
# [-n|--nobuild] - Don't actually build anything
# [-d|-v|--debug] - Print extra debug output (after)
# [-e|--export=N] - Set CONFIG_EXPORT and export to the export location
# [-f|--nofail] - Don't stop on a failed build
# [-e|--export=N] - Set CONFIG_EXPORT and export into each config folder
# [-a|--archive] - Copy the binary to the export location
# [-h|--help] - Print usage and exit
# [-l|--limit=#] - Limit the number of builds in this run
# [-m|--many] - Build all the environments for each example
# [-n|--nobuild] - Don't actually build anything
# [-o|--output] - Redirect export / archiving to another location
# (By default export to origin config folders)
# [-h|--help] - Print usage and exit
# [-p|--purge] - Purge the status file and start over
# [-r|--resume=<path>] - Start at some config in the filesystem order
# [-s|--skip] - Continue the paused build, skipping one
#

HERE=`dirname $0`

@@ -29,21 +30,22 @@ STAT_FILE=./.pio/.buildall

usage() { echo "Usage:

build_all_examples [-b|--branch=<branch>] - Branch to fetch from Configurations repo (import-2.1.x)
build_all_examples [-a|--archive] - Copy the binary to the export location
[-B|--base] - Base path of configurations, overriding -b
[-b|--branch=<branch>] - Branch to fetch from Configurations repo (import-2.1.x)
[-c|--continue] - Continue the paused build
[-p|--purge] - Purge the status file and start over
[-s|--skip] - Continue the paused build, skipping one
[-r|--resume=<path>] - Start at some config in the filesystem order
[-d|-v|--debug] - Print extra debug output (after)
[-e|--export=N] - Set CONFIG_EXPORT and export to the export location
[-a|--archive] - Copy the binary to the export location
[-o|--output] - Redirect export / archiving to another location
(By default export to origin config folders)
[-d|--debug] - Print extra debug output (after)
[-l|--limit=#] - Limit the number of builds in this run
[-n|--nobuild] - Don't actually build anything
[-f|--nofail] - Don't stop on a failed build
[-h|--help] - Print usage and exit
[-l|--limit=#] - Limit the number of builds in this run
[-m|--many] - Build all the environments for each example
[-n|--nobuild] - Don't actually build anything
[-o|--output] - Redirect export / archiving to another location
(By default export to origin config folders)
[-p|--purge] - Purge the status file and start over
[-r|--resume=<path>] - Start at some config in the filesystem order
[-s|--skip] - Continue the paused build, skipping one
"
}

@@ -53,27 +55,29 @@ unset FIRST_CONF
EXIT_USAGE=
LIMIT=1000

while getopts 'aB:b:ce:fdhl:no:pr:sv-:' OFLAG; do
while getopts 'aB:b:cde:fhl:mno:pr:sv-:' OFLAG; do
case "${OFLAG}" in
a) ARCHIVE=1 ; bugout "Archiving" ;;
B) CBASE=${OPTARG%/} ; bugout "Base: $CBASE" ;;
b) BRANCH=$OPTARG ; bugout "Branch: $BRANCH" ;;
f) NOFAIL=1 ; bugout "Continue on Fail" ;;
r) ISRES=1 ; FIRST_CONF=$OPTARG ; bugout "Resume: $FIRST_CONF" ;;
c) CONTINUE=1 ; bugout "Continue" ;;
s) CONTSKIP=1 ; bugout "Continue, skipping" ;;
d|v) DEBUG=1 ; bugout "Debug ON" ;;
e) CEXPORT=$OPTARG ; bugout "Export $CEXPORT" ;;
o) OUTBASE="${OPTARG%/}" ; bugout "Archive to $OUTBASE" ;;
f) NOFAIL=1 ; bugout "Continue on Fail" ;;
h) EXIT_USAGE=1 ; break ;;
l) LIMIT=$OPTARG ; bugout "Limit to $LIMIT build(s)" ;;
d|v) DEBUG=1 ; bugout "Debug ON" ;;
m) MANY=1 ; bugout "Many Envs" ;;
n) DRYRUN=1 ; bugout "Dry Run" ;;
o) OUTBASE="${OPTARG%/}" ; bugout "Archive to $OUTBASE" ;;
p) PURGE=1 ; bugout "Purge stat file" ;;
r) ISRES=1 ; FIRST_CONF=$OPTARG ; bugout "Resume: $FIRST_CONF" ;;
s) CONTSKIP=1 ; bugout "Continue, skipping" ;;
-) ONAM="${OPTARG%%=*}" ; OVAL="${OPTARG#*=}"
case "$ONAM" in
archive) ARCHIVE=1 ; bugout "Archiving" ;;
base) CBASE=${OVAL%/} ; bugout "Base: $CBASE" ;;
branch) BRANCH=$OVAL ; bugout "Branch: $BRANCH" ;;
many) MANY=1 ; bugout "Many Envs" ;;
nofail) NOFAIL=1 ; bugout "Continue on Fail" ;;
resume) ISRES=1 ; FIRST_CONF=$OVAL ; bugout "Resume: $FIRST_CONF" ;;
continue) CONTINUE=1 ; bugout "Continue" ;;

@@ -179,6 +183,9 @@ find -ds "$CBASE"/config/examples -type d -name 'Configuration.h' -o -name 'Conf
# Exporting? Add -e argument
((CEXPORT)) && CARGS+=("-e" "$CEXPORT")

# Build many environments? Add -m argument
((MANY)) && CARGS+=("-m")

# Continue on fail? Add -f argument
((NOFAIL)) && CARGS+=("-f")
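
For reference, a hypothetical invocation that combines the new -m and -o options with the existing ones. The branch, limit, and output path below are placeholders, and buildroot/bin is assumed to be on the PATH (the usage text calls the scripts by bare name):

  # Build every pins.h environment for each example (-m), set CONFIG_EXPORT to 2 (-e 2),
  # archive the binaries (-a), and send exports/archives to a separate folder (-o)
  # instead of the original config folders. Stop after 20 examples (-l 20).
  build_all_examples -b import-2.1.x -m -e 2 -a -o ./.pio/exports -l 20

  # A paused or failed run can be picked up later via the status file:
  build_all_examples -c     # continue where the run left off
  build_all_examples -s     # continue, skipping the config that stopped it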
@@ -4,6 +4,8 @@
#
# build_example -b|--base=<path> - Configurations root folder (e.g., ./.pio/build-BRANCH)
# -c|--config=<rel> - Sub-path of the configs to build (within config/examples)
# [-n|--index=N] - Which environment to build, by index (Based on pins.h comments)
# [-m|--many] - Build all the board's environments listed in pins.h
# [-e|--export=N] - Use CONFIG_EXPORT N to export the config to the export location
# [-a|--archive] - Archive the build (to the export location)
# [-o|--output] - Redirect export / archiving to another location

@@ -19,6 +21,8 @@ usage() { echo "Usage:

build_example -b|--base=<path> - Configurations root folder (e.g., ./.pio/build-BRANCH)
-c|--config=<rel> - Sub-path of the configs to build (within config/examples)
[-n|--index=N] - Which environment to build, by index (Based on pins.h comments)
[-m|--many] - Build all the board's environments listed in pins.h
[-e|--export=N] - Use CONFIG_EXPORT N to export the config to the export location
[-a|--archive] - Archive the build (to the export location)
[-o|--output] - Redirect export / archiving to another location

@@ -50,15 +54,20 @@ REVEAL=
EXPNUM=
NOFAIL=
OUTBASE=
while getopts 'ab:c:e:fhio:r-:' OFLAG; do
BUILDINDEX=1
MANY=

while getopts 'ab:c:e:fhmn:o:r-:' OFLAG; do
case "${OFLAG}" in
a) ARCHIVE=1 ;;
b) BASE="${OPTARG%/}" ;;
c) CONFIG="${OPTARG%/}" ;;
e) EXPNUM="$OPTARG" ;;
o) OUTBASE="${OPTARG%/}" ;;
h) EXIT_USAGE=1 ; break ;;
f) NOFAIL=1 ;;
h) EXIT_USAGE=1 ; break ;;
m) MANY=1 ;;
n) BUILDINDEX="$OPTARG" ;;
o) OUTBASE="${OPTARG%/}" ;;
r) REVEAL=1 ;;
-) ONAM="${OPTARG%%=*}" ; OVAL="${OPTARG#*=}"
case "$ONAM" in

@@ -66,6 +75,8 @@ while getopts 'ab:c:e:fhio:r-:' OFLAG; do
allow) ALLOW=1 ;;
base) BASE="${OVAL%/}" ;;
config) CONFIG="${OVAL%/}" ;;
many) MANY=1 ;;
index) BUILDINDEX="$OVAL" ;;
export) EXPNUM="$OVAL" ;;
output) OUTBASE="${OVAL%/}" ;;
help) EXIT_USAGE=1 ; break ;;
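
Below is a hypothetical direct call to build_example with the new flags, mirroring the -e/-m/-f arguments that build_all_examples now forwards plus the archive and output options from the usage text; the base and config paths are placeholders:

  # Build all of the board's pins.h environments (-m) for one example, export the
  # config (-e 2), archive the firmware (-a), keep going on failure (-f), and
  # redirect results to a separate output folder (-o).
  build_example -b ./.pio/build-import-2.1.x -c "Creality/Ender-3/CrealityV1" \
                -m -e 2 -a -f -o ./.pio/exports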
@@ -154,11 +165,12 @@ ENAME=("-name" "marlin_config.json" \
"-o" "-name" "schema.yml")

# Possible built firmware names (in the build folder)
BNAME=("-name" 'firmware*.hex' \
BNAME=("-name" "firmware*.hex" \
"-o" "-name" "firmware*.bin" \
"-o" "-name" "project*.bin" \
"-o" "-name" "Robin*.bin" \
"-o" "-name" "main_*.bin")
"-o" "-name" "main_*.bin" \
"-o" "-name" "MarlinSimulator*")

mkdir -p "$BUILD"

@@ -166,64 +178,113 @@ mkdir -p "$BUILD"
if [[ $EXPNUM ]]; then
opt_set CONFIG_EXPORT $EXPNUM
# Clean up old exports
find "$BUILD" \( "${ENAME[@]}" \) -exec rm "{}" \;
find "$BUILD" -type f \( "${ENAME[@]}" \) -exec rm "{}" \;
fi

((ARCHIVE)) && find "$BUILD" \( "${BNAME[@]}" \) -exec rm "{}" \;
((ARCHIVE)) && find "$BUILD" -type f \( "${BNAME[@]}" \) -exec rm "{}" \;

set +e
echo "Building example $CONFIG..."

echo "Building example $CONFIG ..."
mftest -s -a -n1 ; ERR=$?
# If doing many builds get a list of all environment names,
# which also gives us the number of environments.
if ((MANY)); then
ENVLIST=$(mfenvs) # BOARD_NAME_STRING (1234): [ env1 env2 env3 ... ]
ENVLIST=${ENVLIST##*: [ }
ENVARRAY=(${ENVLIST% ]})
ENVCOUNT=${#ENVARRAY[*]}
((ENVCOUNT)) || { alrt "mfenvs failed for this board." ; exit 1 ; }
echo "Found $ENVCOUNT environment(s): ${ENVARRAY[*]}"
fi

((ERR)) && alrt "Failed ($ERR)" || annc "Success"
# Run one or more builds based on --many
# Build all from BUILDINDEX onward (usually 1) meaning ALL.
# MANY with a BUILDINDEX may be useful for continuing an interrupted build.

set -e
while ((1)); do
set +e

if [[ $ERR -gt 0 ]]; then
echo "Building example $CONFIG ($BUILDINDEX)..."

# Run a build and record the error number
mftest -s -a -n$BUILDINDEX ; ERR=$?

# "Index out of range" can fail without an error
((MANY)) && ((ERR == 66)) && ERR=0 && break # "index out of range"

set -e

if [[ $ERR -gt 0 ]]; then

alrt "Failed ($ERR)"

# Error? For --nofail simply log. Otherwise return the error.
if [[ -n $NOFAIL ]]; then
date +"%F %T [FAIL] $CONFIG ($BUILDINDEX)" >>./.pio/error-log.txt
else
exit $ERR
fi

# Error? For --nofail simply log. Otherwise return the error.
if [[ -n $NOFAIL ]]; then
date +"%F %T [FAIL] $CONFIG" >>./.pio/error-log.txt
else
exit $ERR
fi

else
annc "Success"

# Copy exports back to the configs
if [[ -n $EXPNUM ]]; then
annc "Exporting $EXPNUM"
[[ -f Marlin/Config-export.h ]] && { cp Marlin/Config-export.h "$ARCSUB"/Config.h ; }
find "$BUILD" \( "${ENAME[@]}" \) -exec cp "{}" "$ARCSUB" \;
fi
# Copy exports back to the configs
if [[ -n $EXPNUM ]]; then
annc "Exporting $EXPNUM"
[[ -f Marlin/Config-export.h ]] && { cp Marlin/Config-export.h "$ARCSUB"/Config.h ; }
find "$BUILD" -type f \( "${ENAME[@]}" \) -exec cp "{}" "$ARCSUB" \;
fi

# Copy potential firmware files into the config folder
# TODO: Consider firmware that needs an STM32F4_UPDATE folder.
# Currently only BOARD_CREALITY_F401RE env:STM32F401RE_creality
if ((ARCHIVE)); then
annc "Archiving"
rm -f "$ARCSUB"/*.bin.tar.gz "$ARCSUB"/*.hex.tar.gz
find "$BUILD" \( "${BNAME[@]}" \) -exec sh -c '
ARCSUB="$1"
CONFIG="$2"
shift 2
for FILE in "$@"; do
cd "${FILE%/*}"
NAME=${FILE##*/}
SHRT=${NAME%.*}
# When building many, create sub-folders for each build env name
if [[ -n $MANY && $ENVCOUNT -gt 1 ]]; then
ENV=${ENVARRAY[BUILDINDEX-1]}
ARCENVSUB="$ARCSUB/$ENV"
else
ARCENVSUB="$ARCSUB"
fi

# Copy potential firmware files into the config folder
# TODO: Consider firmware that needs an STM32F4_UPDATE folder.
# Currently only BOARD_CREALITY_F401RE env:STM32F401RE_creality
if ((ARCHIVE)); then
annc "Archiving"
find "$BUILD" -type f \( "${BNAME[@]}" \) -exec sh -c '
ARCDIR="$1" ; CONFIG="$2" ; FILE="$3" ; shift 3
NAME=${FILE##*/} ; SHRT=${NAME%.*} ; DIR=${FILE%/*}
ZIPX=
if [[ $CONFIG == *Simulator* ]]; then
case $(uname | tr '[:upper:]' '[:lower:]') in
darwin) SUB="macOS" ; ZIPX="-X" ;;
*linux) SUB="Linux" ;;
win*) SUB="Windows" ;;
msys*) SUB="Windows" ;;
cygwin*) SUB="Windows" ;;
mingw*) SUB="Windows" ;;
*) SUB='Unix' ;;
esac
ARCH=$(uname -m | tr '[:lower:]' '[:upper:]')
ARCDIR="$ARCDIR/$SUB-$ARCH"
fi
mkdir -p "$ARCDIR"
rm -f "$ARCDIR"/*.zip "$ARCDIR"/*.sha256.txt
cd "$DIR"
SHASUM=$(sha256sum "$NAME" | cut -d" " -f1)
tar -czf "$ARCSUB/$SHRT.tar.gz" "$NAME"
echo "$CONFIG\n$SHASUM" > "$ARCSUB/$NAME.sha256.txt"
rm "$NAME"
echo "$CONFIG\n$SHASUM" > "$ARCDIR/$NAME.sha256.txt"
zip $ZIPX "$ARCDIR/$SHRT.zip" "$NAME" && rm "$NAME"
cd - >/dev/null
done
' sh "$ARCSUB" "$CONFIG" {} +
' sh "$ARCENVSUB" "$CONFIG" {} +
fi

# Reveal the configs after the build, if requested
((REVEAL)) && { annc "Revealing $ARCENVSUB" ; open "$ARCENVSUB" ; }

fi

# Reveal the configs after the build, if requested
((REVEAL)) && { annc "Revealing $ARCSUB" ; open "$ARCSUB" ; }
((MANY)) || break # Only one build if not --many

fi
# Set up for the next build, if there is one
((++BUILDINDEX > ENVCOUNT)) && break

done
exit 0
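
As an aside, the --many path above carves the environment list out of the mfenvs output line with plain parameter expansion. A standalone sketch of that parsing, using a made-up board line rather than real mfenvs output:

  # Documented format: "SHORT_NAME (###): [ env1 env2 ... ]"
  LINE='MY_BOARD (1234): [ env_a env_b env_c ]'
  LIST=${LINE##*: [ }        # drop everything through ": [ "  ->  "env_a env_b env_c ]"
  ENVS=(${LIST% ]})          # drop the trailing " ]" and word-split into an array
  echo "${#ENVS[@]} envs: ${ENVS[*]}"   # -> "3 envs: env_a env_b env_c"

This is exactly how ENVARRAY and ENVCOUNT are derived before the while loop decides when to stop.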
buildroot/bin/mfenvs (new executable file, 33 lines)

@@ -0,0 +1,33 @@
#!/usr/bin/env bash
#
# mfenvs Print the current board and environment information
# Output -> "SHORT_NAME (###): [ env1 env2 env3 ... ]"
#

[[ -d Marlin/src ]] || { echo "Please 'cd' to the Marlin repo root." ; exit 1 ; }
which pio >/dev/null || { echo "Make sure 'pio' is in your execution PATH." ; exit 1 ; }

errout() { echo -e "\033[0;31m$1\033[0m" ; }

case $(uname | tr '[:upper:]' '[:lower:]') in
darwin) SYS='mac' ;;
*linux) SYS='lin' ;;
win*) SYS='win' ;;
msys*) SYS='win' ;;
cygwin*) SYS='win' ;;
mingw*) SYS='win' ;;
*) SYS='uni' ;;
esac

ACODE='/^[[:space:]]*#define[[:space:]]MOTHERBOARD[[:space:]]/ { sub(/^BOARD_/, "", $3); print $3 }'
MB=$(awk "$ACODE" Marlin/Configuration.h 2>/dev/null)
[[ -z $MB ]] && MB=$(awk "$ACODE" Marlin/Config.h 2>/dev/null)
[[ -z $MB ]] && { echo "Error - Can't read MOTHERBOARD setting." ; exit 1 ; }
BLINE=$( grep -E "define\s+BOARD_$MB\b" Marlin/src/core/boards.h )
BNUM=$( sed -E 's/^.+BOARD_[^ ]+ +([0-9]+).+$/\1/' <<<"$BLINE" )
[[ -z $BNUM ]] && { echo "Error - Can't find BOARD_$MB in core/boards.h." ; exit 1 ; }
ENVS=( $( grep -EA1 "MB\(.*\b$MB\b.*\)" Marlin/src/pins/pins.h | grep -E "#include.+//.+(env|$SYS):[^ ]+" | grep -oE "(env|$SYS):[^ ]+" | sed -E "s/(env|$SYS)://" ) )
[[ -z $ENVS ]] && { errout "Error - Can't find target(s) for $MB ($BNUM)." ; exit 1 ; }
ECOUNT=${#ENVS[*]}
[[ $ECOUNT == 1 ]] && EOUT=$ENVS || EOUT="${ENVS[@]}"
echo "$MB ($BNUM): [ $EOUT ]"
@@ -16,23 +16,24 @@ bugout() { ((DEBUG)) && echo -e "\033[0;32m$1\033[0m" ; }

usage() {
echo "
Usage: mftest [-t|--env=<env|index>] [-n|--num=<num>] [-m|--make] [-y|--build=<Y|n>]
mftest [-a|--autobuild]
Usage: mftest [-a|--autobuild]
mftest [-r|--rebuild]
mftest [-s|--silent]
mftest [-t|--env=<env|index>] [-n|--num=<num>] [-m|--make] [-y|--build=<Y|n>]
mftest [-u|--autoupload] [-n|--num=<num>]

OPTIONS
-t --env The environment to apply / run, or the menu index number.
-n --num The index of the test to run. (In file order.)
-m --make Use the make / Docker method for the build.
-y --build Skip 'Do you want to build this test?' and assume YES.
-h --help Print this help.
-a --autobuild PIO Build using the MOTHERBOARD environment.
-d --default Restore to defaults before applying configs.
-h --help Print this help.
-m --make Use the make / Docker method for the build.
-n --num The index of the test to run. (In file order.)
-r --rebuild Rebuild previous PIO Build.
-s --silent Silence build output from PlatformIO.
-t --env The environment to apply / run, or the menu index number.
-u --autoupload PIO Upload using the MOTHERBOARD environment.
-v --verbose Extra output for debugging.
-s --silent Silence build output from PlatformIO.
-d --default Restore to defaults before applying configs.
-y --build Skip 'Do you want to build this test?' and assume YES.

env shortcuts: tree due esp lin lp8|lpc8 lp9|lpc9 m128 m256|mega stm|f1 f4 f7 s6 teensy|t31|t32 t35|t36 t40|t41
"

@@ -56,7 +57,7 @@ TESTENV='-'
CHOICE=0
DEBUG=0

while getopts 'abdhmrsuvyn:t:-:' OFLAG; do
while getopts 'adhmn:rst:uvy-:' OFLAG; do
case "${OFLAG}" in
a) AUTO_BUILD=1 ; bugout "Auto-Build target..." ;;
d) DL_DEFAULTS=1 ; bugout "Restore to defaults..." ;;

@@ -195,7 +196,7 @@ if ((AUTO_BUILD)); then
fi
else
echo "Detected \"$BDESC\" | $MB ($BNUM)."
[[ $CHOICE > $ECOUNT ]] && { echo "Environment selection out of range." ; exit 1 ; }
[[ $CHOICE > $ECOUNT ]] && { echo "Environment selection out of range." ; exit 66 ; }
fi
TARGET="${ENVS[$CHOICE-1]}"
if [[ $MB == 'SIMULATED' && $TARGET == 'linux_native' ]]; then
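
The out-of-range exit status changes from 1 to 66 so a caller can tell "no more environments" apart from a real build failure; that is what build_example's --many loop keys on. A minimal sketch of the same contract, assuming mftest is on the PATH:

  IDX=1
  while true; do
    mftest -s -a -n$IDX ; ERR=$?
    ((ERR == 66)) && break    # asked for an index past the last environment: normal stop
    ((ERR)) && exit $ERR      # any other non-zero status is a real build failure
    ((IDX++))
  done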
@@ -1,6 +1,6 @@
#!/usr/bin/env python

import sys, os,config
import sys, os, config

def main():
args = sys.argv[1:]

@@ -1,6 +1,6 @@
#!/usr/bin/env python

import sys, os,config
import sys, os, config

def main():
args = sys.argv[1:]

@@ -4,8 +4,7 @@
# Author: Taylor Talkington
# License: GPL

import bdflib.reader
import math
import bdflib.reader, math

def glyph_bits(size_x, size_y, font, glyph_ord):
asc = font[b'FONT_ASCENT']

@@ -12,6 +12,7 @@ Marlin Firmware Commands:
mfadd ....... Fetch a remote branch from any Marlin fork
mfclean ..... Attempt to clean up merged and deleted branches
mfdoc ....... Build the website, serve locally, and browse
mfenvs ...... Get current board SHORT_NAME (###): [ env1 env2 ... ]
mffp ........ Push new commits directly to MarlinFirmware
mfinfo ...... Provide branch information (for the other scripts)
mfinit ...... Create an 'upstream' remote for 'MarlinFirmware'

@@ -2,14 +2,9 @@
# MarlinBinaryProtocol.py
# Supporting Firmware upload via USB/Serial, saving to the attached media.
#
import serial
import math
import time
import serial, math, time, threading, sys, datetime, random
from collections import deque
import threading
import sys
import datetime
import random

try:
import heatshrink2 as heatshrink
heatshrink_exists = True
@@ -18,8 +18,7 @@ Options:
--num-temps=... the number of temperature points to calculate (default: 36)
"""

from __future__ import print_function
from __future__ import division
from __future__ import print_function, division

from math import *
import sys, getopt

@@ -5,8 +5,7 @@
#
# Usage: rle16_compress_cpp_image_data.py INPUT_FILE.cpp OUTPUT_FILE.cpp
#
import sys,struct
import re
import sys, struct, re

def addCompressedData(input_file, output_file):
ofile = open(output_file, 'wt')

@@ -6,10 +6,11 @@
#
# Usage: rle_compress_bitmap.py INPUT_FILE OUTPUT_FILE
#
import sys,struct
import re
import sys, struct, re

def addCompressedData(input_file, output_file):
input_lines = input_file.readlines()
input_file.close()
ofile = open(output_file, 'wt')

datatype = "uint8_t"

@@ -18,8 +19,7 @@ def addCompressedData(input_file, output_file):
arrname = ''

c_data_section = False ; c_skip_data = False ; c_footer = False
while True:
line = input_file.readline()
for line in input_lines:
if not line: break

if not c_footer:

@@ -56,8 +56,6 @@ def addCompressedData(input_file, output_file):
arrname = line.split('[')[0].split(' ')[-1]
print("Found data array", arrname)

input_file.close()

#print("\nRaw Bitmap Data", raw_data)

#

@@ -190,11 +188,11 @@ if len(sys.argv) <= 2:
print('Usage: rle_compress_bitmap.py INPUT_FILE OUTPUT_FILE')
exit(1)

output_cpp = sys.argv[2]
output_h = sys.argv[2]
inname = sys.argv[1].replace('//', '/')
try:
input_cpp = open(inname)
input_h = open(inname)
print("Processing", inname, "...")
addCompressedData(input_cpp, output_cpp)
addCompressedData(input_h, output_h)
except OSError:
print("Can't find input file", inname)
@@ -1,40 +0,0 @@
Overview:
1) Install Sublime
2) Install Deviot (?optional?)
3) Install WebDevShell (this will execute the auto-build script)
4) Copy the menu configuration to the proper Sublime directory
5) Add platformio to your path (usually not needed)

Sublime with autobuild
Tools
Install Package Control
Tools
Command Palette
Package Control: Install Package
type in deviot and click on it
Tools
Command Palette
Package Control: Install Package
type in WebDevShell and click on it

in Sublime, open Marlin directory with "platformio.ini" in it

starting in the top level directory, go to the folder "Buildroot/shared/Sublime"
copy the folder "auto_build_sublime_menu" and contents to:
Windows
\Users\your_user_name\AppData\Roaming\Sublime Text 3\Packages
Linux
/home/your_user_name/.config/sublime-text-3/Packages/User
macOS (Click on the Finder's 'Go' menu and hold down Option to open...)
~/Library/Application Support/Sublime Text 3/Packages/User

The menu should now be visible

If you get an error message that says "file not found" and "subprocess.Popen(['platformio' ... "
then you'll need to add platformio to your path.
macOS
sudo nano /etc/paths
add these to the bottom
/Users/bob/.platformio
/Users/bob/.platformio/penv/bin
@@ -1,66 +0,0 @@
[

{
"caption": "Auto Build",
"children": [
{
"caption": "PIO Build",
"command": "webdevshell",
"args": {
"command": "python buildroot/share/vscode/auto_build.py build"
}
},
{
"caption": "PIO Clean",
"command": "webdevshell",
"args": {
"command": "python buildroot/share/vscode/auto_build.py clean"
}
},
{
"caption": "PIO Upload",
"command": "webdevshell",
"args": {
"command": "python buildroot/share/vscode/auto_build.py upload"
}
},
{
"caption": "PIO Upload (traceback)",
"command": "webdevshell",
"args": {
"command": "python buildroot/share/vscode/auto_build.py traceback"
}
},
{
"caption": "PIO Upload using Programmer",
"command": "webdevshell",
"args": {
"command": "python buildroot/share/vscode/auto_build.py program"
}
},
{
"caption": "PIO Test",
"command": "webdevshell",
"args": {
"command": "python buildroot/share/vscode/auto_build.py test"
}
},
{
"caption": "PIO Debug",
"command": "webdevshell",
"args": {
"command": "python buildroot/share/vscode/auto_build.py debug"
}
},
{
"caption": "PIO Remote",
"command": "webdevshell",
"args": {
"command": "python buildroot/share/vscode/auto_build.py remote"
}
}
],
"id": "AutoBuild",
"mnemonic": "A"
}
]

File diff suppressed because it is too large
File diff suppressed because it is too large
Binary file not shown.
Binary file not shown.
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,143 +0,0 @@
#!/usr/bin/env python
#
# Builds custom upload command
# 1) Run platformio as a subprocess to find a COM port
# 2) Build the upload command
# 3) Exit and let upload tool do the work
#
# This script runs between completion of the library/dependencies installation and compilation.
#
# Will continue on if a COM port isn't found so that the compilation can be done.
#

from __future__ import print_function
from __future__ import division

import subprocess, os, platform
from SCons.Script import DefaultEnvironment

current_OS = platform.system()

env = DefaultEnvironment()

build_type = os.environ.get("BUILD_TYPE", 'Not Set')

if not(build_type == 'upload' or build_type == 'traceback' or build_type == 'Not Set') :
env.Replace(UPLOAD_PROTOCOL = 'teensy-gui') # run normal Teensy2 scripts
else:
com_first = ''
com_last = ''
com_CDC = ''
description_first = ''
description_last = ''
description_CDC = ''

#
# grab the first com port that pops up unless we find one we know for sure
# is a CDC device
#
def get_com_port(com_search_text, descr_search_text, start):

global com_first
global com_last
global com_CDC
global description_first
global description_last
global description_CDC

print('\nLooking for Serial Port\n')

# stream output from subprocess and split it into lines
pio_subprocess = subprocess.Popen(['platformio', 'device', 'list'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

looking_for_description = False
for line in iter(pio_subprocess.stdout.readline, ''):
if 0 <= line.find(com_search_text):
looking_for_description = True
com_last = line.replace('\n', '')
if com_first == '':
com_first = com_last
if 0 <= line.find(descr_search_text) and looking_for_description:
looking_for_description = False
description_last = line[ start : ]
if description_first == '':
description_first = description_last
if 0 <= description_last.find('CDC'):
com_CDC = com_last
description_CDC = description_last

if com_CDC == '' and com_first != '':
com_CDC = com_first
description_CDC = description_first
elif com_CDC == '':
com_CDC = 'COM_PORT_NOT_FOUND'

while 0 <= com_CDC.find('\n'):
com_CDC = com_CDC.replace('\n', '')
while 0 <= com_CDC.find('\r'):
com_CDC = com_CDC.replace('\r', '')

if com_CDC == 'COM_PORT_NOT_FOUND':
print(com_CDC, '\n')
else:
print('FOUND: ', com_CDC)
print('DESCRIPTION: ', description_CDC, '\n')

if current_OS == 'Windows':

get_com_port('COM', 'Hardware ID:', 13)

# avrdude_conf_path = env.get("PIOHOME_DIR") + '\\packages\\toolchain-atmelavr\\etc\\avrdude.conf'
avrdude_conf_path = 'buildroot\\share\\vscode\\avrdude.conf'

avrdude_exe_path = 'buildroot\\share\\vscode\\avrdude_5.10.exe'

# source_path = env.get("PROJECTBUILD_DIR") + '\\' + env.get("PIOENV") + '\\firmware.hex'
source_path = '.pio\\build\\' + env.get("PIOENV") + '\\firmware.hex'

upload_string = avrdude_exe_path + ' -p usb1286 -c avr109 -P ' + com_CDC + ' -U flash:w:' + source_path + ':i'

if current_OS == 'Darwin': # MAC

get_com_port('usbmodem', 'Description:', 13)

# avrdude_conf_path = env.get("PIOHOME_DIR") + '/packages/toolchain-atmelavr/etc/avrdude.conf'
avrdude_conf_path = 'buildroot/share/vscode/avrdude_macOS.conf'

avrdude_exe_path = 'buildroot/share/vscode/avrdude_5.10_macOS'

# source_path = env.get("PROJECTBUILD_DIR") + '/' + env.get("PIOENV") + '/firmware.hex'
source_path = '.pio/build/' + env.get("PIOENV") + '/firmware.hex'

# upload_string = 'avrdude -p usb1286 -c avr109 -P ' + com_CDC + ' -U flash:w:' + source_path + ':i'
upload_string = avrdude_exe_path + ' -p usb1286 -c avr109 -P ' + com_CDC + ' -C ' + avrdude_conf_path + ' -U flash:w:' + source_path + ':i'
print('upload_string: ', upload_string)

if current_OS == 'Linux':

get_com_port('/dev/tty', 'Description:', 13)

# avrdude_conf_path = env.get("PIOHOME_DIR") + '/packages/toolchain-atmelavr/etc/avrdude.conf'
avrdude_conf_path = 'buildroot/share/vscode/avrdude_linux.conf'

avrdude_exe_path = 'buildroot/share/vscode/avrdude_5.10_linux'
# source_path = env.get("PROJECTBUILD_DIR") + '/' + env.get("PIOENV") + '/firmware.hex'
source_path = '.pio/build/' + env.get("PIOENV") + '/firmware.hex'

# upload_string = 'avrdude -p usb1286 -c avr109 -P ' + com_CDC + ' -U flash:w:' + source_path + ':i'
upload_string = avrdude_exe_path + ' -p usb1286 -c avr109 -P ' + com_CDC + ' -C ' + avrdude_conf_path + ' -U flash:w:' + source_path + ':i'

env.Replace(
UPLOADCMD = upload_string,
MAXIMUM_RAM_SIZE = 8192,
MAXIMUM_SIZE = 130048
)
@@ -1,41 +0,0 @@
#
# Builds custom upload command
# 1) Run platformio as a subprocess to find a COM port
# 2) Build the upload command
# 3) Exit and let upload tool do the work
#
# This script runs between completion of the library/dependencies installation and compilation.
#
# Will continue on if a COM port isn't found so that the compilation can be done.
#

import os, platform
from SCons.Script import DefaultEnvironment

current_OS = platform.system()

env = DefaultEnvironment()

build_type = os.environ.get("BUILD_TYPE", 'Not Set')
if not(build_type == 'upload' or build_type == 'traceback' or build_type == 'Not Set') :
env.Replace(UPLOAD_PROTOCOL = 'teensy-gui') # run normal Teensy2 scripts
else:

if current_OS == 'Windows':
avrdude_conf_path = env.get("PIOHOME_DIR") + '\\packages\\toolchain-atmelavr\\etc\\avrdude.conf'

source_path = env.get("PROJECTBUILD_DIR") + '\\' + env.get("PIOENV") + '\\firmware.hex'

upload_string = 'avrdude -p usb1286 -c flip1 -C ' + avrdude_conf_path + ' -U flash:w:' + source_path + ':i'

else:
source_path = env.get("PROJECTBUILD_DIR") + '/' + env.get("PIOENV") + '/firmware.hex'

upload_string = 'avrdude -p usb1286 -c flip1 -U flash:w:' + source_path + ':i'

env.Replace(
UPLOADCMD = upload_string,
MAXIMUM_RAM_SIZE = 8192,
MAXIMUM_SIZE = 130048
)