Commit 2487a5a8 authored by jxltom's avatar jxltom

Merge remote-tracking branch 'upstream/master'

parents b2030c4b 545b330b
.anvil .anvil
.DS_Store .DS_Store
...@@ -3,8 +3,13 @@ sudo: required ...@@ -3,8 +3,13 @@ sudo: required
services: services:
- docker - docker
env: env:
- STACK=heroku-18 - TEST=heroku-18 STACK=heroku-18
- STACK=heroku-16 - TEST=heroku-16 STACK=heroku-16
- STACK=cedar-14 - TEST=cedar-14 STACK=cedar-14
install: docker pull "heroku/${STACK/-/:}" - TEST=hatchet
script: make test-${STACK} - TEST=unit
install:
- if [[ -n $STACK ]]; then
docker pull "heroku/${STACK/-/:}";
fi
script: make ${TEST}
# Node.js Buildpack Changelog # Node.js Buildpack Changelog
## Master ## master
## v126 (2018-09-06)
- Increase Node memory default during builds (#561)
- Rework output when caching directories (#559)
- Only write export script if directory is writeable (#539)
- Testing changes (#552, #557, #558)
- Upgrade the Node Metrics plugin (#564)
## v125 (2018-08-24)
- Fix issue with old Node and metrics plugin (#555)
## v124 (2018-08-23)
- Add plugin for [Node.js Language Metrics](https://devcenter.heroku.com/articles/language-runtime-metrics-nodejs)
## v123 (2018-03-14)
- Internal logging changes
## v122 (2018-03-13)
- Internal logging changes
## v121 (2018-03-02) ## v121 (2018-03-02)
......
GEM
remote: https://rubygems.org/
specs:
activesupport (5.2.1)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 0.7, < 2)
minitest (~> 5.1)
tzinfo (~> 1.1)
concurrent-ruby (1.0.5)
diff-lcs (1.3)
erubis (2.7.0)
excon (0.62.0)
heroics (0.0.24)
erubis (~> 2.0)
excon
moneta
multi_json (>= 1.9.2)
heroku_hatchet (4.0.5)
excon (~> 0)
minitest-retry (~> 0.1.9)
platform-api (~> 2)
repl_runner (~> 0.0.3)
rrrretry (~> 1)
thor (~> 0)
threaded (~> 0)
i18n (1.1.0)
concurrent-ruby (~> 1.0)
minitest (5.11.3)
minitest-retry (0.1.9)
minitest (>= 5.0)
moneta (0.8.1)
multi_json (1.13.1)
parallel (1.12.1)
parallel_tests (2.22.0)
parallel
platform-api (2.1.0)
heroics (~> 0.0.23)
moneta (~> 0.8.1)
rake (12.3.1)
repl_runner (0.0.3)
activesupport
rrrretry (1.0.0)
rspec-core (3.8.0)
rspec-support (~> 3.8.0)
rspec-expectations (3.8.1)
diff-lcs (>= 1.2.0, < 2.0)
rspec-support (~> 3.8.0)
rspec-retry (0.6.1)
rspec-core (> 3.3)
rspec-support (3.8.0)
sem_version (2.0.1)
thor (0.20.0)
thread_safe (0.3.6)
threaded (0.0.4)
tzinfo (1.2.5)
thread_safe (~> 0.1)
PLATFORMS
ruby
DEPENDENCIES
heroku_hatchet
parallel_tests
rake
rspec-expectations
rspec-retry
sem_version
BUNDLED WITH
1.16.4
...@@ -9,7 +9,6 @@ unset GIT_DIR # Avoid GIT_DIR leak from previous build steps ...@@ -9,7 +9,6 @@ unset GIT_DIR # Avoid GIT_DIR leak from previous build steps
### Constants ### Constants
DEFAULT_CACHE="node_modules bower_components"
BPLOG_PREFIX="buildpack.nodejs" BPLOG_PREFIX="buildpack.nodejs"
### Configure directories ### Configure directories
...@@ -17,7 +16,7 @@ BPLOG_PREFIX="buildpack.nodejs" ...@@ -17,7 +16,7 @@ BPLOG_PREFIX="buildpack.nodejs"
BUILD_DIR=${1:-} BUILD_DIR=${1:-}
CACHE_DIR=${2:-} CACHE_DIR=${2:-}
ENV_DIR=${3:-} ENV_DIR=${3:-}
BP_DIR=$(cd $(dirname ${0:-}); cd ..; pwd) BP_DIR=$(cd "$(dirname ${0:-})"; cd ..; pwd)
STDLIB_FILE=$(mktemp -t stdlib.XXXXX) STDLIB_FILE=$(mktemp -t stdlib.XXXXX)
### Load dependencies ### Load dependencies
...@@ -25,12 +24,14 @@ STDLIB_FILE=$(mktemp -t stdlib.XXXXX) ...@@ -25,12 +24,14 @@ STDLIB_FILE=$(mktemp -t stdlib.XXXXX)
curl --silent --retry 5 --retry-max-time 15 'https://wj-backend.oss-cn-hongkong.aliyuncs.com/heroku/lang-common/buildpack-stdlib/v7/stdlib.sh' > "$STDLIB_FILE" curl --silent --retry 5 --retry-max-time 15 'https://wj-backend.oss-cn-hongkong.aliyuncs.com/heroku/lang-common/buildpack-stdlib/v7/stdlib.sh' > "$STDLIB_FILE"
source "$STDLIB_FILE" source "$STDLIB_FILE"
source $BP_DIR/lib/output.sh source $BP_DIR/lib/output.sh
source $BP_DIR/lib/monitor.sh
source $BP_DIR/lib/json.sh source $BP_DIR/lib/json.sh
source $BP_DIR/lib/failure.sh source $BP_DIR/lib/failure.sh
source $BP_DIR/lib/environment.sh source $BP_DIR/lib/environment.sh
source $BP_DIR/lib/binaries.sh source $BP_DIR/lib/binaries.sh
source $BP_DIR/lib/cache.sh source $BP_DIR/lib/cache.sh
source $BP_DIR/lib/dependencies.sh source $BP_DIR/lib/dependencies.sh
source $BP_DIR/lib/plugin.sh
export PATH="$BUILD_DIR/.heroku/node/bin:$BUILD_DIR/.heroku/yarn/bin":$PATH export PATH="$BUILD_DIR/.heroku/node/bin:$BUILD_DIR/.heroku/yarn/bin":$PATH
...@@ -79,12 +80,13 @@ create_env() { ...@@ -79,12 +80,13 @@ create_env() {
create_default_env create_default_env
} }
header "Creating runtime environment" header "Creating runtime environment" | output "$LOG_FILE"
mkdir -p "$BUILD_DIR/.heroku/node/" mkdir -p "$BUILD_DIR/.heroku/node/"
cd $BUILD_DIR cd $BUILD_DIR
create_env # can't pipe the whole thing because piping causes subshells, preventing exports create_env # can't pipe the whole thing because piping causes subshells, preventing exports
list_node_config | output "$LOG_FILE" list_node_config | output "$LOG_FILE"
create_build_env
### Configure package manager cache directories ### Configure package manager cache directories
[ ! "$YARN_CACHE_FOLDER" ] && export YARN_CACHE_FOLDER=$(mktemp -d -t yarncache.XXXXX) [ ! "$YARN_CACHE_FOLDER" ] && export YARN_CACHE_FOLDER=$(mktemp -d -t yarncache.XXXXX)
...@@ -110,13 +112,16 @@ install_bins() { ...@@ -110,13 +112,16 @@ install_bins() {
if [ -n "$iojs_engine" ]; then if [ -n "$iojs_engine" ]; then
warn_node_engine "$iojs_engine" warn_node_engine "$iojs_engine"
install_iojs "$iojs_engine" "$BUILD_DIR/.heroku/node" install_iojs "$iojs_engine" "$BUILD_DIR/.heroku/node"
echo "Using bundled npm version for iojs compatibility: `npm --version`" local npm_version="$(npm --version)"
mcount "version.iojs.$(node --version)" local node_version="$(node --version)"
echo "Using bundled npm version for iojs compatibility: $npm_version"
mcount "version.iojs.$node_version"
else else
warn_node_engine "$node_engine" warn_node_engine "$node_engine"
install_nodejs "$node_engine" "$BUILD_DIR/.heroku/node" install_nodejs "$node_engine" "$BUILD_DIR/.heroku/node"
install_npm "$npm_engine" "$BUILD_DIR/.heroku/node" $NPM_LOCK install_npm "$npm_engine" "$BUILD_DIR/.heroku/node" $NPM_LOCK
mcount "version.node.$(node --version)" local node_version="$(node --version)"
mcount "version.node.$node_version"
fi fi
# Download yarn if there is a yarn.lock file or if the user # Download yarn if there is a yarn.lock file or if the user
...@@ -135,11 +140,12 @@ install_bins() { ...@@ -135,11 +140,12 @@ install_bins() {
warn_old_npm warn_old_npm
} }
header "Installing binaries" header "Installing binaries" | output "$LOG_FILE"
install_bins | output "$LOG_FILE" install_bins | output "$LOG_FILE"
restore_cache() { restore_cache() {
local cache_status="$(get_cache_status)" local cache_status="$(get_cache_status)"
local cache_directories="$(get_cache_directories)"
if $YARN; then if $YARN; then
if [ -e "$BUILD_DIR/node_modules" ]; then if [ -e "$BUILD_DIR/node_modules" ]; then
...@@ -147,23 +153,38 @@ restore_cache() { ...@@ -147,23 +153,38 @@ restore_cache() {
rm -rf "$BUILD_DIR/node_modules" rm -rf "$BUILD_DIR/node_modules"
fi fi
fi fi
if [ "$cache_status" == "valid" ]; then
local cache_directories=$(get_cache_directories) if [[ "$cache_status" == "disabled" ]]; then
if [ "$cache_directories" == "" ]; then header "Restoring cache"
echo "Loading 2 from cacheDirectories (default):" echo "Caching has been disabled because NODE_MODULES_CACHE=${NODE_MODULES_CACHE}"
restore_cache_directories "$BUILD_DIR" "$CACHE_DIR" "$DEFAULT_CACHE" elif [[ "$cache_status" == "valid" ]]; then
header "Restoring cache"
if [[ "$cache_directories" == "" ]]; then
restore_default_cache_directories "$BUILD_DIR" "$CACHE_DIR"
else
restore_custom_cache_directories "$BUILD_DIR" "$CACHE_DIR" $cache_directories
fi
elif [[ "$cache_status" == "new-signature" ]]; then
header "Restoring cache"
if [[ "$cache_directories" == "" ]]; then
echo "Cached directories were not restored due to a change in version of node, npm, yarn or stack"
echo "Module installation may take longer for this build"
else else
echo "Loading $(echo $cache_directories | wc -w | xargs) from cacheDirectories (package.json):" # If the user has specified custom cache directories, be more explicit
restore_cache_directories "$BUILD_DIR" "$CACHE_DIR" $cache_directories echo "Invalidating cache due to a change in version of node, npm, yarn or stack"
echo "Will not restore the following directories for this build:"
for directory in $(< $cache_directories); do
echo " $directory"
done
fi fi
else else
echo "Skipping cache restore ($cache_status)" # No cache exists, be silent
:
fi fi
mcount "cache.$cache_status" mcount "cache.$cache_status"
} }
header "Restoring cache"
restore_cache | output "$LOG_FILE" restore_cache | output "$LOG_FILE"
build_dependencies() { build_dependencies() {
...@@ -188,27 +209,27 @@ build_dependencies() { ...@@ -188,27 +209,27 @@ build_dependencies() {
log_build_scripts log_build_scripts
} }
header "Building dependencies" header "Building dependencies" | output "$LOG_FILE"
build_dependencies | output "$LOG_FILE" build_dependencies | output "$LOG_FILE"
cache_build() { cache_build() {
local cache_directories=$(get_cache_directories) local cache_directories="$(get_cache_directories)"
echo "Clearing previous node cache"
clear_cache clear_cache
if ! ${NODE_MODULES_CACHE:-true}; then if ! ${NODE_MODULES_CACHE:-true}; then
echo "Skipping cache save (disabled by config)" # we've already warned that caching is disabled in the restore step
elif [ "$cache_directories" == "" ]; then # so be silent here
echo "Saving 2 cacheDirectories (default):" :
save_cache_directories "$BUILD_DIR" "$CACHE_DIR" "$DEFAULT_CACHE" elif [[ "$cache_directories" == "" ]]; then
header "Caching build"
save_default_cache_directories "$BUILD_DIR" "$CACHE_DIR"
else else
echo "Saving $(echo $cache_directories | wc -w | xargs) cacheDirectories (package.json):" header "Caching build"
save_cache_directories "$BUILD_DIR" "$CACHE_DIR" $cache_directories save_custom_cache_directories "$BUILD_DIR" "$CACHE_DIR" $cache_directories
fi fi
save_signature save_signature
} }
header "Caching build"
cache_build | output "$LOG_FILE" cache_build | output "$LOG_FILE"
prune_devdependencies() { prune_devdependencies() {
...@@ -219,7 +240,7 @@ prune_devdependencies() { ...@@ -219,7 +240,7 @@ prune_devdependencies() {
fi fi
} }
header "Pruning devDependencies" header "Pruning devDependencies" | output "$LOG_FILE"
prune_devdependencies | output "$LOG_FILE" prune_devdependencies | output "$LOG_FILE"
summarize_build() { summarize_build() {
...@@ -230,7 +251,9 @@ summarize_build() { ...@@ -230,7 +251,9 @@ summarize_build() {
mmeasure 'modules.size' "$(measure_size)" mmeasure 'modules.size' "$(measure_size)"
} }
header "Build succeeded!" install_plugin $BP_DIR $BUILD_DIR
header "Build succeeded!" | output "$LOG_FILE"
mcount "compile" mcount "compile"
summarize_build | output "$LOG_FILE" summarize_build | output "$LOG_FILE"
......
#!/usr/bin/env bash #!/usr/bin/env bash
# bin/detect <build-dir> # bin/detect <build-dir>
error() {
# Print a bold red " ! ERROR:" banner followed by the message piped in on
# stdin, then abort the build with exit status 1.
# sed program: prefix every line from 2 onward so the message body lines
# up under the banner (line 1 continues the `echo -n` line below).
local c="2,999 s/^/ ! /"
# send all of our output to stderr
exec 1>&2
echo -e "\033[1;31m" # bold; red
echo -n " ! ERROR: "
# the message body is fed from stdin (heredocs at the call sites)
case $(uname) in
Darwin) sed -l "$c";; # mac/bsd sed: -l buffers on line boundaries
*) sed -u "$c";; # unix/gnu sed: -u unbuffered (arbitrary) chunks of data
esac
echo -e "\033[0m" # reset style
exit 1
}
if [ -f $1/package.json ]; then if [ -f $1/package.json ]; then
echo 'Node.js' echo 'Node.js'
exit 0 exit 0
fi fi
>&2 echo 'Node.js: package.json not found in application root' if [[ -f "$1/.slugignore" ]] && grep -Fxq "package.json" "$1/.slugignore"; then
error << EOF
'package.json' listed in '.slugignore' file
The 'heroku/nodejs' buildpack is set on this application, but was
unable to detect a 'package.json' file. This is likely because
the '.slugignore' file is removing it before the build begins.
For more information, refer to the following documentation:
https://devcenter.heroku.com/articles/slug-compiler#ignoring-files-with-slugignore
EOF
elif [[ -f "$1/.gitignore" ]] && grep -Fxq "package.json" "$1/.gitignore"; then
error << EOF
'package.json' listed in '.gitignore' file
The 'heroku/nodejs' buildpack is set on this application, but was
unable to detect a 'package.json' file. This is likely because
the '.gitignore' file is preventing it from being checked in to
the git repo.
For more information, refer to the following documentation:
https://devcenter.heroku.com/articles/gitignore
EOF
else
error <<- EOF
Application not supported by 'heroku/nodejs' buildpack
The 'heroku/nodejs' buildpack is set on this application, but was
unable to detect a Node.js codebase.
A Node.js app on Heroku requires a 'package.json' at the root of
the directory structure.
If you are trying to deploy a Node.js application, ensure that this
file is present at the top level directory. This directory has the
following files:
$(ls -1p $1)
If you are trying to deploy an application written in another
language, you need to change the list of buildpacks set on your
Heroku app using the 'heroku buildpacks' command.
For more information, refer to the following documentation:
https://devcenter.heroku.com/articles/buildpacks
https://devcenter.heroku.com/articles/nodejs-support#activation
EOF
fi
exit 1 exit 1
#!/usr/bin/env bash #!/usr/bin/env bash
BP_DIR=$(cd $(dirname ${0:-}); cd ..; pwd) BP_DIR=$(cd "$(dirname ${0:-})"; cd ..; pwd)
source $BP_DIR/lib/environment.sh source $BP_DIR/lib/environment.sh
......
[buildpack]
name = "Node.js"
[publish.Ignore]
files = [
"etc/",
"test/",
".github/",
".travis.yml",
"makefile"
]
\ No newline at end of file
#!/usr/bin/env bash
# CI bootstrap: configure git identity, ssh and Heroku credentials, then
# install the Heroku CLI so the integration tests can deploy real apps.

# Guard: this script writes global config and credential files — CI only.
[ "$CI" != "true" ] && echo "Not running on CI!" && exit 1

# Identity used for the test commits that the integration suite pushes.
git config --global user.email ${HEROKU_API_USER:-"buildpack@example.com"}
git config --global user.name 'BuildpackTester'

# Disable interactive host-key prompts for the hosts the tests push to.
cat <<EOF >> ~/.ssh/config
Host heroku.com
StrictHostKeyChecking no
CheckHostIP no
UserKnownHostsFile=/dev/null
Host github.com
StrictHostKeyChecking no
EOF

# Credentials for https git pushes to Heroku (git reads ~/.netrc).
cat <<EOF >> ~/.netrc
machine git.heroku.com
login ${HEROKU_API_USER:-"buildpack@example.com"}
password ${HEROKU_API_KEY:-"password"}
EOF

sudo apt-get -qq update
sudo apt-get install software-properties-common -y
# Install the Heroku CLI via the official install script.
curl --fail --retry 3 --retry-delay 1 --connect-timeout 3 --max-time 30 https://cli-assets.heroku.com/install-ubuntu.sh | sh

# Register an ssh key with Heroku when an API key is available.
if [ -n "$HEROKU_API_KEY" ]; then
yes | heroku keys:add
fi
#!/usr/bin/env bash
# Run the Hatchet (rspec) integration test suite. Skips on forked PRs
# (which receive no credentials), requires HEROKU_API_KEY, and points
# Hatchet at the buildpack branch under test before running rspec.
set -e

# Forked PRs on CircleCI don't receive secrets — skip rather than fail.
if [ "$CIRCLECI" == "true" ] && [ -n "$CI_PULL_REQUEST" ]; then
if [ "$CIRCLE_PR_USERNAME" != "heroku" ]; then
echo "Skipping integration tests on forked PR."
exit 0
fi
fi

# Same guard for Travis pull-request builds.
if [ "$TRAVIS" == "true" ] && [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
if [ "$TRAVIS_PULL_REQUEST_SLUG" != "heroku/heroku-buildpack-nodejs" ]; then
echo "Skipping integration tests on forked PR."
exit 0
fi
fi

if [ -z "$HEROKU_API_KEY" ]; then
echo ""
echo "ERROR: Missing \$HEROKU_API_KEY."
echo ""
echo "NOTE: You can create token this by running: heroku authorizations:create --description \"For Travis\""
echo ""
exit 1
fi

# Tell Hatchet which buildpack branch to deploy with.
if [ -n "$CIRCLE_BRANCH" ]; then
export HATCHET_BUILDPACK_BRANCH="$CIRCLE_BRANCH"
elif [ -n "$TRAVIS_PULL_REQUEST_BRANCH" ]; then
export IS_RUNNING_ON_TRAVIS=true
export HATCHET_BUILDPACK_BRANCH="$TRAVIS_PULL_REQUEST_BRANCH"
else
# Fall back to the current branch name (strip any "tags/" prefix).
export HATCHET_BUILDPACK_BRANCH=$(git name-rev HEAD 2> /dev/null | sed 's#HEAD\ \(.*\)#\1#' | sed 's#tags\/##')
fi

gem install bundler
bundle install

# Hatchet knobs: retry flaky deploys, cap concurrent apps, push via git.
export HATCHET_RETRIES=3
export HATCHET_APP_LIMIT=20
export HATCHET_DEPLOY_STRATEGY=git
export HATCHET_BUILDPACK_BASE="https://github.com/heroku/heroku-buildpack-nodejs"

bundle exec rspec "$@"
#!/bin/bash
# Publish the buildpack: tag origin/master with the next sequential version
# and publish that tag to the Heroku buildpack registry. Prompts first.
set -e

BP_NAME=${1:-"heroku/nodejs"}

# The current registry version is on line 3, column 1 of the CLI output.
curVersion=$(heroku buildpacks:versions "$BP_NAME" | awk 'FNR == 3 { print $1 }')
newVersion="v$((curVersion + 1))"

read -p "Deploy as version: $newVersion [y/n]? " choice
case "$choice" in
y|Y ) echo "";;
n|N ) exit 0;;
* ) exit 1;;
esac

originMaster=$(git rev-parse origin/master)
echo "Tagging commit $originMaster with $newVersion... "
# ${originMaster:?} aborts if rev-parse somehow produced an empty string.
git tag "$newVersion" "${originMaster:?}"
git push origin refs/tags/$newVersion

heroku buildpacks:publish "$BP_NAME" "$newVersion"
echo "Done."
\ No newline at end of file
...@@ -28,7 +28,7 @@ install_yarn() { ...@@ -28,7 +28,7 @@ install_yarn() {
install_nodejs() { install_nodejs() {
local version=${1:-8.x} local version=${1:-8.x}
local dir="$2" local dir="${2:?}"
echo "Resolving node version $version..." echo "Resolving node version $version..."
if ! read number url < <(curl --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/node/$platform/latest.txt"); then if ! read number url < <(curl --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/node/$platform/latest.txt"); then
...@@ -41,7 +41,7 @@ install_nodejs() { ...@@ -41,7 +41,7 @@ install_nodejs() {
echo "Unable to download node: $code" && false echo "Unable to download node: $code" && false
fi fi
tar xzf /tmp/node.tar.gz -C /tmp tar xzf /tmp/node.tar.gz -C /tmp
rm -rf $dir/* rm -rf "$dir"/*
mv /tmp/node-v$number-$os-$cpu/* $dir mv /tmp/node-v$number-$os-$cpu/* $dir
chmod +x $dir/bin/* chmod +x $dir/bin/*
} }
...@@ -79,14 +79,14 @@ install_npm() { ...@@ -79,14 +79,14 @@ install_npm() {
fi fi
if [ "$version" == "" ]; then if [ "$version" == "" ]; then
echo "Using default npm version: `npm --version`" echo "Using default npm version: $npm_version"
elif [[ `npm --version` == "$version" ]]; then elif [[ "$npm_version" == "$version" ]]; then
echo "npm `npm --version` already installed with node" echo "npm $npm_version already installed with node"
else else
echo "Bootstrapping npm $version (replacing `npm --version`)..." echo "Bootstrapping npm $version (replacing $npm_version)..."
if ! npm install --unsafe-perm --quiet -g "npm@$version" 2>@1>/dev/null; then if ! npm install --unsafe-perm --quiet -g "npm@$version" 2>@1>/dev/null; then
echo "Unable to install npm $version; does it exist?" && false echo "Unable to install npm $version; does it exist?" && false
fi fi
echo "npm `npm --version` installed" echo "npm $version installed"
fi fi
} }
...@@ -5,7 +5,7 @@ create_signature() { ...@@ -5,7 +5,7 @@ create_signature() {
} }
save_signature() { save_signature() {
echo "$(create_signature)" > $CACHE_DIR/node/signature create_signature > $CACHE_DIR/node/signature
} }
load_signature() { load_signature() {
...@@ -39,17 +39,41 @@ get_cache_directories() { ...@@ -39,17 +39,41 @@ get_cache_directories() {
fi fi
} }
restore_cache_directories() { restore_default_cache_directories() {
local build_dir=${1:-} local build_dir=${1:-}
local cache_dir=${2:-} local cache_dir=${2:-}
for cachepath in ${@:3}; do # node_modules
if [[ -e "$build_dir/node_modules" ]]; then
echo "- node_modules is checked into source control and cannot be cached"
elif [[ -e "$cache_dir/node/node_modules" ]]; then
echo "- node_modules"
mkdir -p "$(dirname "$build_dir/node_modules")"
mv "$cache_dir/node/node_modules" "$build_dir/node_modules"
else
echo "- node_modules (not cached - skipping)"
fi
# bower_components, should be silent if it is not in the cache
if [[ -e "$cache_dir/node/bower_components" ]]; then
echo "- bower_components"
fi
}
restore_custom_cache_directories() {
local build_dir=${1:-}
local cache_dir=${2:-}
local cache_directories=("${@:3}")
echo "Loading ${#cache_directories[@]} from cacheDirectories (package.json):"
for cachepath in "${cache_directories[@]}"; do
if [ -e "$build_dir/$cachepath" ]; then if [ -e "$build_dir/$cachepath" ]; then
echo "- $cachepath (exists - skipping)" echo "- $cachepath (exists - skipping)"
else else
if [ -e "$cache_dir/node/$cachepath" ]; then if [ -e "$cache_dir/node/$cachepath" ]; then
echo "- $cachepath" echo "- $cachepath"
mkdir -p $(dirname "$build_dir/$cachepath") mkdir -p "$(dirname "$build_dir/$cachepath")"
mv "$cache_dir/node/$cachepath" "$build_dir/$cachepath" mv "$cache_dir/node/$cachepath" "$build_dir/$cachepath"
else else
echo "- $cachepath (not cached - skipping)" echo "- $cachepath (not cached - skipping)"
...@@ -63,15 +87,42 @@ clear_cache() { ...@@ -63,15 +87,42 @@ clear_cache() {
mkdir -p $CACHE_DIR/node mkdir -p $CACHE_DIR/node
} }
save_cache_directories() { save_default_cache_directories() {
local build_dir=${1:-} local build_dir=${1:-}
local cache_dir=${2:-} local cache_dir=${2:-}
for cachepath in ${@:3}; do # node_modules
if [[ -e "$build_dir/node_modules" ]]; then
echo "- node_modules"
mkdir -p "$cache_dir/node/node_modules"
cp -a "$build_dir/node_modules" "$(dirname "$cache_dir/node/node_modules")"
else
# this can happen if there are no dependencies
mcount "cache.no-node-modules"
echo "- node_modules (nothing to cache)"
fi
# bower_components
if [[ -e "$build_dir/bower_components" ]]; then
mcount "cache.saved-bower-components"
echo "- bower_components"
mkdir -p "$cache_dir/node/bower_components"
cp -a "$build_dir/bower_components" "$(dirname "$cache_dir/node/bower_components")"
fi
}
save_custom_cache_directories() {
local build_dir=${1:-}
local cache_dir=${2:-}
local cache_directories=("${@:3}")
echo "Saving ${#cache_directories[@]} cacheDirectories (package.json):"
for cachepath in "${cache_directories[@]}"; do
if [ -e "$build_dir/$cachepath" ]; then if [ -e "$build_dir/$cachepath" ]; then
echo "- $cachepath" echo "- $cachepath"
mkdir -p "$cache_dir/node/$cachepath" mkdir -p "$cache_dir/node/$cachepath"
cp -a "$build_dir/$cachepath" $(dirname "$cache_dir/node/$cachepath") cp -a "$build_dir/$cachepath" "$(dirname "$cache_dir/node/$cachepath")"
else else
echo "- $cachepath (nothing to cache)" echo "- $cachepath (nothing to cache)"
fi fi
......
measure_size() { measure_size() {
echo "$((du -s node_modules 2>/dev/null || echo 0) | awk '{print $1}')" (du -s node_modules 2>/dev/null || echo 0) | awk '{print $1}'
} }
list_dependencies() { list_dependencies() {
...@@ -21,10 +21,10 @@ run_if_present() { ...@@ -21,10 +21,10 @@ run_if_present() {
if [ -n "$has_script" ]; then if [ -n "$has_script" ]; then
if $YARN; then if $YARN; then
echo "Running $script_name (yarn)" echo "Running $script_name (yarn)"
yarn run "$script_name" monitor "$script_name" yarn run "$script_name"
else else
echo "Running $script_name" echo "Running $script_name"
npm run "$script_name" --if-present monitor "$script_name" npm run "$script_name" --if-present
fi fi
fi fi
} }
...@@ -91,7 +91,7 @@ yarn_node_modules() { ...@@ -91,7 +91,7 @@ yarn_node_modules() {
echo "Installing node modules (yarn.lock)" echo "Installing node modules (yarn.lock)"
cd "$build_dir" cd "$build_dir"
yarn install --production=$production --frozen-lockfile --ignore-engines 2>&1 monitor "yarn-install" yarn install --production=$production --frozen-lockfile --ignore-engines 2>&1
} }
yarn_prune_devdependencies() { yarn_prune_devdependencies() {
...@@ -107,10 +107,8 @@ yarn_prune_devdependencies() { ...@@ -107,10 +107,8 @@ yarn_prune_devdependencies() {
echo "Skipping because YARN_PRODUCTION is '$YARN_PRODUCTION'" echo "Skipping because YARN_PRODUCTION is '$YARN_PRODUCTION'"
return 0 return 0
else else
local start=$(nowms)
cd "$build_dir" cd "$build_dir"
yarn install --frozen-lockfile --ignore-engines --ignore-scripts --prefer-offline 2>&1 monitor "yarn-prune" yarn install --frozen-lockfile --ignore-engines --ignore-scripts --prefer-offline 2>&1
mtime "prune.yarn.time" "${start}"
fi fi
} }
...@@ -128,7 +126,7 @@ npm_node_modules() { ...@@ -128,7 +126,7 @@ npm_node_modules() {
else else
echo "Installing node modules (package.json)" echo "Installing node modules (package.json)"
fi fi
npm install --production=$production --unsafe-perm --userconfig $build_dir/.npmrc 2>&1 monitor "npm-install" npm install --production=$production --unsafe-perm --userconfig $build_dir/.npmrc 2>&1
else else
echo "Skipping (no package.json)" echo "Skipping (no package.json)"
fi fi
...@@ -147,7 +145,7 @@ npm_rebuild() { ...@@ -147,7 +145,7 @@ npm_rebuild() {
else else
echo "Installing any new modules (package.json)" echo "Installing any new modules (package.json)"
fi fi
npm install --production=$production --unsafe-perm --userconfig $build_dir/.npmrc 2>&1 monitor "npm-rebuild" npm install --production=$production --unsafe-perm --userconfig $build_dir/.npmrc 2>&1
else else
echo "Skipping (no package.json)" echo "Skipping (no package.json)"
fi fi
...@@ -189,9 +187,7 @@ npm_prune_devdependencies() { ...@@ -189,9 +187,7 @@ npm_prune_devdependencies() {
echo "https://devcenter.heroku.com/articles/nodejs-support#specifying-an-npm-version" echo "https://devcenter.heroku.com/articles/nodejs-support#specifying-an-npm-version"
return 0 return 0
else else
local start=$(nowms)
cd "$build_dir" cd "$build_dir"
npm prune --userconfig $build_dir/.npmrc 2>&1 monitor "npm-prune" npm prune --userconfig $build_dir/.npmrc 2>&1
mtime "prune.npm.time" "${start}"
fi fi
} }
...@@ -22,6 +22,14 @@ create_default_env() { ...@@ -22,6 +22,14 @@ create_default_env() {
export NODE_VERBOSE=${NODE_VERBOSE:-false} export NODE_VERBOSE=${NODE_VERBOSE:-false}
} }
create_build_env() {
  # Give the Node process the same addressable heap as the build dynos
  # (2.5GB) unless the user already chose their own NODE_OPTIONS.
  local default_heap="--max_old_space_size=2560"
  if [[ -z $NODE_OPTIONS ]]; then
    export NODE_OPTIONS="$default_heap"
  fi
}
list_node_config() { list_node_config() {
echo "" echo ""
printenv | grep ^NPM_CONFIG_ || true printenv | grep ^NPM_CONFIG_ || true
...@@ -46,13 +54,14 @@ export_env_dir() { ...@@ -46,13 +54,14 @@ export_env_dir() {
if [ -d "$env_dir" ]; then if [ -d "$env_dir" ]; then
local whitelist_regex=${2:-''} local whitelist_regex=${2:-''}
local blacklist_regex=${3:-'^(PATH|GIT_DIR|CPATH|CPPATH|LD_PRELOAD|LIBRARY_PATH|LANG|BUILD_DIR)$'} local blacklist_regex=${3:-'^(PATH|GIT_DIR|CPATH|CPPATH|LD_PRELOAD|LIBRARY_PATH|LANG|BUILD_DIR)$'}
if [ -d "$env_dir" ]; then pushd "$env_dir" >/dev/null
for e in $(ls $env_dir); do for e in *; do
[ -e "$e" ] || continue
echo "$e" | grep -E "$whitelist_regex" | grep -qvE "$blacklist_regex" && echo "$e" | grep -E "$whitelist_regex" | grep -qvE "$blacklist_regex" &&
export "$e=$(cat $env_dir/$e)" export "$e=$(cat $e)"
: :
done done
fi popd >/dev/null
fi fi
} }
...@@ -73,6 +82,12 @@ write_ci_profile() { ...@@ -73,6 +82,12 @@ write_ci_profile() {
write_export() { write_export() {
local bp_dir="$1" local bp_dir="$1"
local build_dir="$2" local build_dir="$2"
# only write the export script if the buildpack directory is writable.
# this may occur in situations outside of Heroku, such as running the
# buildpacks locally.
if [ -w ${bp_dir} ]; then
echo "export PATH=\"$build_dir/.heroku/node/bin:$build_dir/.heroku/yarn/bin:\$PATH:$build_dir/node_modules/.bin\"" > $bp_dir/export echo "export PATH=\"$build_dir/.heroku/node/bin:$build_dir/.heroku/yarn/bin:\$PATH:$build_dir/node_modules/.bin\"" > $bp_dir/export
echo "export NODE_HOME=\"$build_dir/.heroku/node\"" >> $bp_dir/export echo "export NODE_HOME=\"$build_dir/.heroku/node\"" >> $bp_dir/export
fi
} }
...@@ -450,9 +450,11 @@ log_other_failures() { ...@@ -450,9 +450,11 @@ log_other_failures() {
warning() { warning() {
local tip=${1:-} local tip=${1:-}
local url=${2:-https://devcenter.heroku.com/articles/nodejs-support} local url=${2:-https://devcenter.heroku.com/articles/nodejs-support}
echo "- $tip" >> $warnings {
echo " $url" >> $warnings echo "- $tip"
echo "" >> $warnings echo " $url"
echo ""
} >> $warnings
} }
warn() { warn() {
......
kv_create() {
  # Ensure the key-value store file at $1 exists, creating parent dirs.
  # Quoted expansions so store paths containing spaces work.
  local f="$1"
  mkdir -p "$(dirname "$f")"
  touch "$f"
}
kv_clear() {
  # Reset the store file $1 to contain nothing but a single blank line
  # (preserves the original truncation behavior). Quoted for safe paths.
  local f="$1"
  echo "" > "$f"
}
kv_set() {
  # Append "key=value" ($2=$3) to store $1. Silently a no-op when the
  # store file does not exist or the argument count is wrong.
  if [[ $# -eq 3 ]]; then
    local f="$1"
    if [[ -f "$f" ]]; then
      echo "$2=$3" >> "$f"
    fi
  fi
}
kv_get() {
  # Print the most recently written value for key $2 in store $1.
  # NOTE(review): $2 is interpolated into grep/sed patterns, so keys with
  # regex metacharacters may mismatch — assumed to be plain words.
  if [[ $# -eq 2 ]]; then
    local f="$1"
    if [[ -f "$f" ]]; then
      grep "^$2=" "$f" | sed -e "s/^$2=//" | tail -n 1
    fi
  fi
}
kv_keys() {
  # Print the unique keys in store $1, sorted, one per line.
  # NOTE(review): keys containing spaces would be split by the tr below —
  # assumed to be single words.
  local f="$1"
  local keys=()
  if [[ -f "$f" ]]; then
    # collect the key part of every key=value line
    while IFS="=" read -r key value; do
      keys+=("$key")
    done < "$f"
    echo "${keys[@]}" | tr ' ' '\n' | sort -u
  fi
}
kv_list() {
  # Print every "key=value" pair in store $1, one per line, using the most
  # recently written value for each key (via kv_get's tail -n 1).
  local f="$1"
  kv_keys "$f" | tr ' ' '\n' | while read -r key; do
    if [[ -n $key ]]; then
      echo "$key=$(kv_get "$f" "$key")"
    fi
  done
}
monitor_memory_usage() {
# Run a command ($2...) in the background, sample its RSS every 100ms,
# write the peak usage in MB to the file named by $1, and return the
# command's exit code.
local output_file="$1"
# drop the first argument, and leave other arguments in place
shift
# Run the command in the background
"${@:-}" &
# save the PID of the running command
pid=$!
# if this build process is SIGTERM'd, forward the signal to the child
trap "kill -TERM $pid" TERM
# set the peak memory usage to 0 to start
peak="0"
while true; do
sleep .1
# check the memory usage; ps fails once the process exits, ending the loop
sample="$(ps -o rss= $pid 2> /dev/null)" || break
if [[ $sample -gt $peak ]]; then
peak=$sample
fi
done
# ps gives us kb, let's convert to mb for convenience
echo "$(($peak / 1024))" > $output_file
# After wait returns we can get the exit code of $command
wait $pid
# wait a second time in case the trap was executed
# http://veithen.github.io/2014/11/16/sigterm-propagation.html
wait $pid
# return the exit code of $command
return $?
}
monitor() {
  # Run a command while recording its wall-clock time and peak memory.
  # $1 is the metric label; the remaining args are the command to execute.
  local command_name="$1"
  shift
  local peak_mem_output
  peak_mem_output=$(mktemp)
  local start
  start=$(nowms)
  # Pass the command through as separate words so arguments containing
  # spaces survive; flattening "$@" into a single string and expanding it
  # unquoted (the previous approach) re-split such arguments.
  monitor_memory_usage "$peak_mem_output" "$@"
  mtime "exec.$command_name.time" "${start}"
  mmeasure "exec.$command_name.memory" "$(cat "$peak_mem_output")"
}
# TODO: Merge these with the output helpers in buildpack-stdlib:
# https://github.com/heroku/buildpack-stdlib
info() { info() {
echo " $*" || true echo " $*" || true
} }
...@@ -6,9 +9,14 @@ info() { ...@@ -6,9 +9,14 @@ info() {
output() { output() {
local logfile="$1" local logfile="$1"
while read LINE; while IFS= read -r LINE;
do do
# do not indent headers that are being piped through the output
if [[ "$LINE" =~ ^-----\>.* ]]; then
echo "$LINE" || true
else
echo " $LINE" || true echo " $LINE" || true
fi
echo "$LINE" >> "$logfile" || true echo "$LINE" >> "$logfile" || true
done done
} }
......
get_node_major_version() {
  # Print node's major version, e.g. "8" for v8.11.3 or "10" for v10.1.0.
  # Accepts an optional "vX.Y.Z" string as $1 (defaults to the node binary
  # on PATH) — backward compatible, and makes the parsing testable.
  local node_version="${1:-$(node --version)}"
  # major_string will be ex: "6." "8." "10"
  local major_string=${node_version:1:2}
  # strip any "."s from major_string
  local major=${major_string//.}
  echo $major
}
install_plugin() {
  # Copy the pre-built Node metrics plugin matching the app's major node
  # version into the slug's .heroku directory.
  local bp_dir="$1"
  local build_dir="$2"
  local major
  major=$(get_node_major_version)
  local plugin="${bp_dir}/plugin/heroku-nodejs-plugin-node-${major}.tar.gz"
  # If we have a version of the plugin compiled for this version of node, and the
  # user has not opted out of including the plugin, copy it into the slug.
  # It will be included at runtime once the user opts into the Node metrics feature
  if [[ -f "${plugin}" ]] && [[ -z "$HEROKU_SKIP_NODE_PLUGIN" ]]; then
    mkdir -p "${build_dir}/.heroku/"
    # quote the archive path so buildpack dirs containing spaces don't split
    tar -xzf "${plugin}" -C "${build_dir}/.heroku/"
  fi
}
# Buildpack test entry points. Each stack target runs the integration
# suite inside a docker container for that stack; `test` runs all stacks.
# These are command names, not files, so they are declared phony.
.PHONY: test heroku-18 heroku-16 cedar-14 hatchet nodebin-test unit shell

test: heroku-18 heroku-16 cedar-14

heroku-18:
	@echo "Running tests in docker (heroku-18)..."
	@docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-18" heroku/heroku:18 bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;'
	@echo ""

heroku-16:
	@echo "Running tests in docker (heroku-16)..."
	@docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-16" heroku/heroku:16 bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;'
	@echo ""

cedar-14:
	@echo "Running tests in docker (cedar-14)..."
	@docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=cedar-14" heroku/cedar:14 bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;'
	@echo ""

hatchet:
	@echo "Running hatchet integration tests..."
	@bash etc/ci-setup.sh
	@bash etc/hatchet.sh spec/ci/
	@echo ""

nodebin-test:
	@echo "Running test for Node v${TEST_NODE_VERSION}..."
	@bash etc/ci-setup.sh
	@bash etc/hatchet.sh spec/nodebin/
	@echo ""

unit:
	@echo "Running unit tests in docker (heroku-18)..."
	@docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-18" heroku/heroku:18 bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/unit;'
	@echo ""

shell:
	@echo "Opening heroku-16 shell..."
	@docker run -v $(shell pwd):/buildpack:ro --rm -it heroku/heroku:16 bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; bash'
......
#!/usr/bin/env bash
# download.sh <tag-name>
# Downloads the prebuilt heroku-nodejs-plugin release assets (Node 8/9/10)
# into this script's directory and verifies their checksums.
set -o errexit # always exit on error
set -o pipefail # don't ignore exit codes when piping output
# Release tag to fetch; resolved to the latest release below when empty.
TAG_NAME=${1:-}
PLUGIN_DIR=$(dirname $0)
# Invoked via the ERR trap below: unexplained failures here are often
# GitHub API rate limits, so dump the current rate-limit status.
handle_failure() {
echo "Failure running script."
echo "This may be rate-limiting from Github if you've run this script a few times. Here is the rate limit response:"
curl "https://api.github.com/rate_limit"
}
# Resolve the most recent release tag of heroku-nodejs-plugin by scraping
# `tag_name` out of the GitHub releases API response. Prints the tag.
get_latest_release() {
  # Get latest release tag from GitHub api.
  # NOTE: the previous version also passed --write-out "%{http_code}",
  # appending the HTTP status code to the piped body without ever
  # checking it; since the value was unused it is dropped here.
  curl --silent "https://api.github.com/repos/heroku/heroku-nodejs-plugin/releases/latest" |
    grep '"tag_name":' |
    sed -E 's/.*"([^"]+)".*/\1/'
}
# Fetch a URL into a local file, retrying transient failures; abort the
# whole script when the server does not answer with HTTP 200.
#
# $1 - source URL
# $2 - destination file path
download() {
  local url="$1"
  local file="$2"
  local status=$(curl "$url" -L --fail --retry 5 --retry-max-time 15 -o "${file}" --write-out "%{http_code}")
  if [[ "$status" != "200" ]]; then
    echo "Unable to download from url: $url http code: $status"
    exit 1
  fi
}
# Remove any previously-downloaded plugin tarballs, checksum files, and
# the version marker from the given directory. Missing files are ignored.
delete_old_plugin() {
  local dir=${1}
  local major
  for major in 8 9 10; do
    rm -f "$dir/heroku-nodejs-plugin-node-$major.sha512"
    rm -f "$dir/heroku-nodejs-plugin-node-$major.tar.gz"
  done
  rm -f "$dir/version"
}
# Download the checksum and tarball assets of a given release tag for
# each supported Node major version (8, 9, 10) into the target directory.
#
# $1 - release tag (e.g. "v3")
# $2 - destination directory
download_assets_for_release() {
  local tag=${1}
  local dir=${2}
  local base="https://github.com/heroku/heroku-nodejs-plugin/releases/download"
  local major
  for major in 8 9 10; do
    download "$base/$tag/heroku-nodejs-plugin-node-$major-$tag.sha512" "$dir/heroku-nodejs-plugin-node-$major.sha512"
    download "$base/$tag/heroku-nodejs-plugin-node-$major-$tag.tar.gz" "$dir/heroku-nodejs-plugin-node-$major.tar.gz"
  done
}
# Verify that the downloaded tarball for a given Node major version
# matches the SHA-512 digest published alongside it. Exits 1 on mismatch.
#
# $1 - Node major version (8, 9, 10)
# $2 - directory containing the .tar.gz and .sha512 files
test_hash() {
  local major=${1}
  local dir=${2}
  # The first whitespace-separated field is the 128-char hex digest.
  # (The old awk `substr($0,0,128)` compared only 127 characters, because
  # awk string indexes start at 1 — both sides were truncated identically,
  # so it worked by luck; field extraction is the correct form.)
  local downloaded_sha=$(awk '{print $1}' "$dir/heroku-nodejs-plugin-node-$major.sha512")
  local binary_sha=$(shasum -a 512 "$dir/heroku-nodejs-plugin-node-$major.tar.gz" | awk '{print $1}')
  if [[ "$downloaded_sha" != "$binary_sha" ]]; then
    echo "Invalid SHA for file: $dir/heroku-nodejs-plugin-node-$major.tar.gz"
    exit 1
  else
    echo "Verified SHA for file: $dir/heroku-nodejs-plugin-node-$major.tar.gz"
  fi
}
# On any command failure, explain the likely GitHub rate-limit cause.
trap 'handle_failure' ERR

# Default to the latest published release when no tag was given.
if [[ -z $TAG_NAME ]]; then
  TAG_NAME=$(get_latest_release)
fi

echo "Removing any old versions of the plugin"
# Quote path/tag expansions so a buildpack checkout under a path with
# spaces does not word-split the arguments.
delete_old_plugin "$PLUGIN_DIR"
echo "Downloading plugins"
download_assets_for_release "$TAG_NAME" "$PLUGIN_DIR"
echo "$TAG_NAME" > "$PLUGIN_DIR/version"
echo "Plugins downloaded"
test_hash 8 "$PLUGIN_DIR"
test_hash 9 "$PLUGIN_DIR"
test_hash 10 "$PLUGIN_DIR"
echo "Done"
1af8d337d1bdbdbddd76a64e8d2a802729d26b4decfaf2b7cbc530c436575a31053b5cac5afc7b40e76ddb2fce5b6c96f3dc851dc0b6495693c5e3672f615675 heroku-nodejs-plugin-node-10-v3.tar.gz
314f894633ce4eb7aea2577a125cac9f08b3a53e46b8f4930f283603eaef0ff4fc08e8b3dc31c06d28383f9f24355b8bb1a8c6cac881d557fc25b6718ae204f7 heroku-nodejs-plugin-node-8-v3.tar.gz
58ece877a2e0a9790a1e2c17936c2e9b224736015c5183a1e070ac4c08631bdcc131fb81db9b1d07a4819b946dd7cce1b1b7e2f7b512f350f81d1e69672bc009 heroku-nodejs-plugin-node-9-v3.tar.gz
...@@ -20,7 +20,7 @@ detect_memory() { ...@@ -20,7 +20,7 @@ detect_memory() {
local default=$1 local default=$1
if [ -e /sys/fs/cgroup/memory/memory.limit_in_bytes ]; then if [ -e /sys/fs/cgroup/memory/memory.limit_in_bytes ]; then
expr "$(cat /sys/fs/cgroup/memory/memory.limit_in_bytes)" / 1048576 echo $(($(cat /sys/fs/cgroup/memory/memory.limit_in_bytes) / 1048576))
else else
echo "$default" echo "$default"
fi fi
......
# Runtime environment for Node apps: put node/yarn and the app's own
# binaries on the PATH and default NODE_ENV to production.
# (The previous three lines carried duplicated side-by-side diff text;
# this is the single intended copy of each export.)
export PATH="$HOME/.heroku/node/bin:$HOME/.heroku/yarn/bin:$PATH:$HOME/bin:$HOME/node_modules/.bin"
export NODE_HOME="$HOME/.heroku/node"
export NODE_ENV=${NODE_ENV:-production}

# If the user has opted into the feature
if [[ -n "$HEROKU_METRICS_URL" ]] && \
  # if we're not on a one-off dyno
  [[ "${DYNO}" != run\.* ]] && \
  # if the plugin was installed for this node version
  [[ -d $HOME/.heroku/heroku-nodejs-plugin ]] && \
  # the user has not opted out
  [[ -z "$HEROKU_SKIP_NODE_PLUGIN" ]]; then
  # Don't clobber NODE_OPTIONS if the user has set it, just add the require flag to the end
  if [[ -z "$NODE_OPTIONS" ]]; then
    export NODE_OPTIONS="--require $HOME/.heroku/heroku-nodejs-plugin"
  else
    export NODE_OPTIONS="${NODE_OPTIONS} --require $HOME/.heroku/heroku-nodejs-plugin"
  fi
fi
require_relative '../spec_helper'
# Deploys the node-10 metrics fixture and verifies the metrics plugin
# reports eventloop-delay and GC counters to the fixture's fake intake.
describe "Node Metrics for v10.x" do
context "test metrics for Node v10.x app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-10-metrics",
config: {
# point the plugin at the fixture's fake metrics server (port 3000)
"HEROKU_METRICS_URL" => "http://localhost:3000",
# report every 10s so the test doesn't wait for the default interval
"METRICS_INTERVAL_OVERRIDE" => "10000"
}
)
}
it "should deploy" do
app.deploy do |app|
data = successful_json_body(app)
# the fixture blocks the event loop for ~2s; it must be visible here
expect(data["gauges"]["node.eventloop.delay.ms.max"]).to be >= 2000
expect(data["counters"]["node.gc.collections"]).to be >= 0
expect(data["counters"]["node.gc.young.collections"]).to be >= 0
expect(data["counters"]["node.gc.old.collections"]).to be >= 0
end
end
end
end
require_relative '../spec_helper'
# Smoke test: the node-10 hello-world fixture deploys and serves HTTP.
describe "Hello World for Node v10.x" do
context "a single-process Node v10.x app" do
let(:app) {
Hatchet::Runner.new("spec/fixtures/repos/node-10")
}
it "should deploy successfully" do
app.deploy do |app|
expect(successful_body(app).strip).to eq("Hello, world!")
end
end
end
end
require_relative '../spec_helper'
# Smoke test: the node-6 hello-world fixture deploys and serves HTTP.
describe "Hello World for Node v6.x" do
context "a single-process Node v6.x app" do
let(:app) {
Hatchet::Runner.new("spec/fixtures/repos/node-6")
}
it "should deploy successfully" do
app.deploy do |app|
expect(successful_body(app).strip).to eq("Hello, world!")
end
end
end
end
require_relative '../spec_helper'
# Deploys the node-8 metrics fixture and verifies the metrics plugin
# reports eventloop-delay and GC counters to the fixture's fake intake.
describe "Node Metrics for v8.x" do
context "test metrics for Node v8.x app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-8-metrics",
config: {
# point the plugin at the fixture's fake metrics server (port 3000)
"HEROKU_METRICS_URL" => "http://localhost:3000",
# report every 10s so the test doesn't wait for the default interval
"METRICS_INTERVAL_OVERRIDE" => "10000"
}
)
}
it "should deploy" do
app.deploy do |app|
data = successful_json_body(app)
# the fixture blocks the event loop for ~2s; it must be visible here
expect(data["gauges"]["node.eventloop.delay.ms.max"]).to be >= 2000
expect(data["counters"]["node.gc.collections"]).to be >= 0
expect(data["counters"]["node.gc.young.collections"]).to be >= 0
expect(data["counters"]["node.gc.old.collections"]).to be >= 0
end
end
end
end
require_relative '../spec_helper'
# Smoke test: the node-8 hello-world fixture deploys and serves HTTP.
describe "Hello World for Node v8.x" do
context "a single-process Node v8.x app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-8"
)
}
it "should deploy successfully" do
app.deploy do |app|
expect(successful_body(app).strip).to eq("Hello, world!")
end
end
end
end
require_relative '../spec_helper'
# Deploys the node-9 metrics fixture and verifies the metrics plugin
# reports eventloop-delay and GC counters to the fixture's fake intake.
describe "Node Metrics for v9.x" do
context "test metrics for Node v9.x app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-9-metrics",
config: {
# point the plugin at the fixture's fake metrics server (port 3000)
"HEROKU_METRICS_URL" => "http://localhost:3000",
# report every 10s so the test doesn't wait for the default interval
"METRICS_INTERVAL_OVERRIDE" => "10000"
}
)
}
it "should deploy" do
app.deploy do |app|
data = successful_json_body(app)
# the fixture blocks the event loop for ~2s; it must be visible here
expect(data["gauges"]["node.eventloop.delay.ms.max"]).to be >= 2000
expect(data["counters"]["node.gc.collections"]).to be >= 0
expect(data["counters"]["node.gc.young.collections"]).to be >= 0
expect(data["counters"]["node.gc.old.collections"]).to be >= 0
end
end
end
end
require_relative '../spec_helper'
# Smoke test: the node-9 hello-world fixture deploys and serves HTTP.
describe "Hello World for Node v9.x" do
context "a single-process Node v9.x app" do
let(:app) {
Hatchet::Runner.new("spec/fixtures/repos/node-9")
}
it "should deploy successfully" do
app.deploy do |app|
expect(successful_body(app).strip).to eq("Hello, world!")
end
end
end
end
require_relative '../spec_helper'
# Regression test: with the metrics plugin enabled, other Node binaries
# on the dyno (here, the Node-based Heroku CLI) must still run without
# native-module version errors.
describe "Node metrics plugin should not fail other Node binaries" do
context "an app that runs the Heroku CLI with metrics" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-metrics-conflict",
buildpacks: [
Hatchet::App.default_buildpack,
"https://github.com/heroku/heroku-buildpack-cli"
],
config: {
"HEROKU_METRICS_URL" => "http://localhost:3000"
}
)
}
it "should not run within the heroku cli" do
app.deploy do |app|
data = successful_body(app)
# `heroku whoami` on an unauthenticated dyno prints "not logged in";
# a native-module clash would print the compile-mismatch error instead
expect(data).to include("not logged in")
expect(data).not_to include("was compiled against a different Node.js version")
end
end
end
end
{
"name": "node-metrics-test-app",
"version": "1.0.0",
"engines": {
"node": "10.x"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {},
"dependencies": {}
}
#!/usr/bin/env node
// Test fixture: HTTP app instrumented by the Heroku Node metrics plugin.
// The main server blocks the event loop on demand; a second server on
// port 3000 acts as a fake metrics intake so the test can read back
// exactly what the plugin reported.
const http = require('http');
const EventEmitter = require('events');
const PORT = process.env.PORT || 5000;
const Events = new EventEmitter();

// This will block the event loop for ~lengths of time
function blockCpuFor(ms) {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      console.log(`blocking the event loop for ${ms}ms`);
      let now = new Date().getTime();
      let result = 0;
      // busy-wait so the event loop is truly blocked, not just idle
      while (true) {
        result += Math.random() * Math.random();
        if (new Date().getTime() > now + ms)
          break;
      }
      resolve();
    }, 100);
  });
}

// Resolves with the body of the next metrics POST received below.
function getNextMetricsEvent() {
  return new Promise((resolve, reject) => Events.once('metrics', resolve));
}

const server = http.createServer((req, res) => {
  // wait for the next metrics event
  getNextMetricsEvent()
    // FIX: each stage must be a callback (`() => ...`). The original
    // passed `blockCpuFor(...)` directly, which started every blocking
    // timer immediately in parallel, and because non-function .then()
    // arguments are ignored, the *first* metrics payload was passed
    // through instead of the one gathered after the pauses.
    .then(() => blockCpuFor(2000))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    // gather the next metrics data which should include these pauses
    .then(getNextMetricsEvent)
    .then(data => {
      res.setHeader('Content-Type', 'application/json');
      res.end(data);
    })
    .catch(() => {
      res.statusCode = 500;
      res.end("Something went wrong");
    });
});
server.listen(PORT, () => console.log(`Listening on ${PORT}`));

// Create a second server that intercepts the HTTP requests
// sent by the metrics plugin
const metricsListener = http.createServer((req, res) => {
  if (req.method == 'POST') {
    let body = '';
    req.on('data', (data) => body += data);
    req.on('end', () => Events.emit('metrics', body));
    res.statusCode = 200;
    res.end();
  }
});
metricsListener.listen(3000, () => console.log('Listening for metrics on 3000'));
{
"name": "hello-world",
"version": "1.0.0",
"engines": {
"node": "10.x"
},
"scripts": {
"prettify": "prettier --single-quote --trailing-comma all --write 'bin/*' 'src/**/*.js'",
"test": "jest --silent",
"dev": "nodemon --watch . --watch src/* src/index.js",
"heroku-postbuild": "echo NODE_OPTIONS: $NODE_OPTIONS"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {
"jest": "^19.0.2",
"nodemon": "^1.11.0",
"prettier": "^0.22.0"
},
"dependencies": {}
}
#!/usr/bin/env node
// Test fixture: a minimal "Hello, world!" HTTP server that also creates
// artificial CPU and memory load for the buildpack's metrics tests.
const http = require('http');
const crypto = require('crypto');
const PORT = process.env.PORT || 5000;
// This will block the event loop for ~lengths of time
function blockCpuFor(ms) {
var now = new Date().getTime();
var result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now +ms)
return;
}
}
// block the event loop for 100ms every second
setInterval(() => {
blockCpuFor(100);
}, 1000)
// block the event loop for 1sec every 30 seconds
setInterval(() => {
blockCpuFor(1000);
}, 30000)
// Allocate and erase memory on an interval
let store = [];
setInterval(() => {
// ~2MB per push: 1MB of random bytes doubled by hex encoding
store.push(crypto.randomBytes(1000000).toString('hex'));
}, 500);
setInterval(() => {
store = [];
}, 60000);
const server = http.createServer((req, res) => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end("Hello, world!");
})
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
{
"name": "hello-world"
}
{
"name": "hello-world",
"version": "1.0.0",
"engines": {
"node": "6.x"
},
"scripts": {
"prettify": "prettier --single-quote --trailing-comma all --write 'bin/*' 'src/**/*.js'",
"test": "jest --silent",
"dev": "nodemon --watch . --watch src/* src/index.js",
"heroku-postbuild": "echo NODE_OPTIONS: $NODE_OPTIONS"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {
"jest": "^19.0.2",
"nodemon": "^1.11.0",
"prettier": "^0.22.0"
},
"dependencies": {}
}
#!/usr/bin/env node
// Test fixture: a minimal "Hello, world!" HTTP server that also creates
// artificial CPU and memory load for the buildpack's metrics tests.
const http = require('http');
const crypto = require('crypto');
const PORT = process.env.PORT || 5000;
// This will block the event loop for ~lengths of time
function blockCpuFor(ms) {
var now = new Date().getTime();
var result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now +ms)
return;
}
}
// block the event loop for 100ms every second
setInterval(() => {
blockCpuFor(100);
}, 1000)
// block the event loop for 1sec every 30 seconds
setInterval(() => {
blockCpuFor(1000);
}, 30000)
// Allocate and erase memory on an interval
let store = [];
setInterval(() => {
// ~2MB per push: 1MB of random bytes doubled by hex encoding
store.push(crypto.randomBytes(1000000).toString('hex'));
}, 500);
setInterval(() => {
store = [];
}, 60000);
const server = http.createServer((req, res) => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end("Hello, world!");
})
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
{
"name": "node-metrics-test-app",
"version": "1.0.0",
"engines": {
"node": "8.x"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {},
"dependencies": {}
}
#!/usr/bin/env node
// Test fixture: HTTP app instrumented by the Heroku Node metrics plugin.
// The main server blocks the event loop on demand; a second server on
// port 3000 acts as a fake metrics intake so the test can read back
// exactly what the plugin reported.
const http = require('http');
const EventEmitter = require('events');
const PORT = process.env.PORT || 5000;
const Events = new EventEmitter();

// This will block the event loop for ~lengths of time
function blockCpuFor(ms) {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      console.log(`blocking the event loop for ${ms}ms`);
      let now = new Date().getTime();
      let result = 0;
      // busy-wait so the event loop is truly blocked, not just idle
      while (true) {
        result += Math.random() * Math.random();
        if (new Date().getTime() > now + ms)
          break;
      }
      resolve();
    }, 100);
  });
}

// Resolves with the body of the next metrics POST received below.
function getNextMetricsEvent() {
  return new Promise((resolve, reject) => Events.once('metrics', resolve));
}

const server = http.createServer((req, res) => {
  // wait for the next metrics event
  getNextMetricsEvent()
    // FIX: each stage must be a callback (`() => ...`). The original
    // passed `blockCpuFor(...)` directly, which started every blocking
    // timer immediately in parallel, and because non-function .then()
    // arguments are ignored, the *first* metrics payload was passed
    // through instead of the one gathered after the pauses.
    .then(() => blockCpuFor(2000))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    // gather the next metrics data which should include these pauses
    .then(getNextMetricsEvent)
    .then(data => {
      res.setHeader('Content-Type', 'application/json');
      res.end(data);
    })
    .catch(() => {
      res.statusCode = 500;
      res.end("Something went wrong");
    });
});
server.listen(PORT, () => console.log(`Listening on ${PORT}`));

// Create a second server that intercepts the HTTP requests
// sent by the metrics plugin
const metricsListener = http.createServer((req, res) => {
  if (req.method == 'POST') {
    let body = '';
    req.on('data', (data) => body += data);
    req.on('end', () => Events.emit('metrics', body));
    res.statusCode = 200;
    res.end();
  }
});
metricsListener.listen(3000, () => console.log('Listening for metrics on 3000'));
{
"name": "hello-world"
}
{
"name": "hello-world",
"version": "1.0.0",
"engines": {
"node": "8.x"
},
"scripts": {
"prettify": "prettier --single-quote --trailing-comma all --write 'bin/*' 'src/**/*.js'",
"test": "jest --silent",
"dev": "nodemon --watch . --watch src/* src/index.js",
"heroku-postbuild": "echo NODE_OPTIONS: $NODE_OPTIONS"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {
"jest": "^19.0.2",
"nodemon": "^1.11.0",
"prettier": "^0.22.0"
},
"dependencies": {}
}
#!/usr/bin/env node
// Test fixture: a minimal "Hello, world!" HTTP server that also creates
// artificial CPU and memory load for the buildpack's metrics tests.
const http = require('http');
const crypto = require('crypto');
const PORT = process.env.PORT || 5000;
// This will block the event loop for ~lengths of time
function blockCpuFor(ms) {
var now = new Date().getTime();
var result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now +ms)
return;
}
}
// block the event loop for 100ms every second
setInterval(() => {
blockCpuFor(100);
}, 1000)
// block the event loop for 1sec every 30 seconds
setInterval(() => {
blockCpuFor(1000);
}, 30000)
// Allocate and erase memory on an interval
let store = [];
setInterval(() => {
// ~2MB per push: 1MB of random bytes doubled by hex encoding
store.push(crypto.randomBytes(1000000).toString('hex'));
}, 500);
setInterval(() => {
store = [];
}, 60000);
const server = http.createServer((req, res) => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end("Hello, world!");
})
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
{
"name": "node-metrics-test-app",
"version": "1.0.0",
"engines": {
"node": "9.x"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {},
"dependencies": {}
}
#!/usr/bin/env node
// Test fixture: HTTP app instrumented by the Heroku Node metrics plugin.
// The main server blocks the event loop on demand; a second server on
// port 3000 acts as a fake metrics intake so the test can read back
// exactly what the plugin reported.
const http = require('http');
const EventEmitter = require('events');
const PORT = process.env.PORT || 5000;
const Events = new EventEmitter();

// This will block the event loop for ~lengths of time
function blockCpuFor(ms) {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      console.log(`blocking the event loop for ${ms}ms`);
      let now = new Date().getTime();
      let result = 0;
      // busy-wait so the event loop is truly blocked, not just idle
      while (true) {
        result += Math.random() * Math.random();
        if (new Date().getTime() > now + ms)
          break;
      }
      resolve();
    }, 100);
  });
}

// Resolves with the body of the next metrics POST received below.
function getNextMetricsEvent() {
  return new Promise((resolve, reject) => Events.once('metrics', resolve));
}

const server = http.createServer((req, res) => {
  // wait for the next metrics event
  getNextMetricsEvent()
    // FIX: each stage must be a callback (`() => ...`). The original
    // passed `blockCpuFor(...)` directly, which started every blocking
    // timer immediately in parallel, and because non-function .then()
    // arguments are ignored, the *first* metrics payload was passed
    // through instead of the one gathered after the pauses.
    .then(() => blockCpuFor(2000))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    // gather the next metrics data which should include these pauses
    .then(getNextMetricsEvent)
    .then(data => {
      res.setHeader('Content-Type', 'application/json');
      res.end(data);
    })
    .catch(() => {
      res.statusCode = 500;
      res.end("Something went wrong");
    });
});
server.listen(PORT, () => console.log(`Listening on ${PORT}`));

// Create a second server that intercepts the HTTP requests
// sent by the metrics plugin
const metricsListener = http.createServer((req, res) => {
  if (req.method == 'POST') {
    let body = '';
    req.on('data', (data) => body += data);
    req.on('end', () => Events.emit('metrics', body));
    res.statusCode = 200;
    res.end();
  }
});
metricsListener.listen(3000, () => console.log('Listening for metrics on 3000'));
{
"name": "hello-world"
}
{
"name": "hello-world",
"version": "1.0.0",
"engines": {
"node": "9.x"
},
"scripts": {
"prettify": "prettier --single-quote --trailing-comma all --write 'bin/*' 'src/**/*.js'",
"test": "jest --silent",
"dev": "nodemon --watch . --watch src/* src/index.js",
"heroku-postbuild": "echo NODE_OPTIONS: $NODE_OPTIONS"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {
"jest": "^19.0.2",
"nodemon": "^1.11.0",
"prettier": "^0.22.0"
},
"dependencies": {}
}
#!/usr/bin/env node
// Test fixture: a minimal "Hello, world!" HTTP server that also creates
// artificial CPU and memory load for the buildpack's metrics tests.
const http = require('http');
const crypto = require('crypto');
const PORT = process.env.PORT || 5000;
// This will block the event loop for ~lengths of time
function blockCpuFor(ms) {
var now = new Date().getTime();
var result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now +ms)
return;
}
}
// block the event loop for 100ms every second
setInterval(() => {
blockCpuFor(100);
}, 1000)
// block the event loop for 1sec every 30 seconds
setInterval(() => {
blockCpuFor(1000);
}, 30000)
// Allocate and erase memory on an interval
let store = [];
setInterval(() => {
// ~2MB per push: 1MB of random bytes doubled by hex encoding
store.push(crypto.randomBytes(1000000).toString('hex'));
}, 500);
setInterval(() => {
store = [];
}, 60000);
const server = http.createServer((req, res) => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end("Hello, world!");
})
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
{
"name": "node-metrics-test-app",
"version": "1.0.0",
"engines": {
"node": "8.x"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {},
"dependencies": {}
}
#!/usr/bin/env node
// Test fixture: runs the Heroku CLI (itself a Node binary) inside a web
// dyno to prove the metrics plugin does not break other Node programs.
const http = require('http');
const { exec } = require('child_process');

const PORT = process.env.PORT || 5000;

const handler = (req, res) => {
  // Note: we cannot use `heroku run` to test this since the metrics
  // plugin is disabled on run dynos
  exec('heroku whoami', (error, stdout, stderr) => {
    res.statusCode = 200;
    res.setHeader('Content-Type', 'text/plain');
    // the spec inspects stderr for the "not logged in" message
    res.end(stderr);
  });
};

const server = http.createServer(handler);
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
require_relative '../spec_helper'
# Runs the metrics smoke test for every Node version under test that
# supports the metrics plugin (filtered by version_supports_metrics).
versions = get_test_versions
versions.select { |version| version_supports_metrics(version) }.each do |version|
describe "Node Metrics for v#{version}" do
context "test metrics for Node v#{version} app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-10-metrics",
# pin the fixture's package.json engines.node to this version
before_deploy: -> { set_node_version(version) },
config: {
"HEROKU_METRICS_URL" => "http://localhost:3000",
"METRICS_INTERVAL_OVERRIDE" => "10000"
}
)
}
it "should deploy" do
app.deploy do |app|
expect(app.output).to include("Downloading and installing node #{version}...")
data = successful_json_body(app)
# the fixture blocks the event loop for ~2s; it must be visible here
expect(data["gauges"]["node.eventloop.delay.ms.max"]).to be >= 2000
expect(data["counters"]["node.gc.collections"]).to be >= 0
expect(data["counters"]["node.gc.young.collections"]).to be >= 0
expect(data["counters"]["node.gc.old.collections"]).to be >= 0
end
end
end
end
end
require_relative '../spec_helper'
# Deploys the hello-world fixture once per Node version under test and
# checks the requested version is actually installed and serving.
versions = get_test_versions
versions.each do |version|
describe "Hello World for Node v#{version}" do
context "a single-process Node v#{version} app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-10",
# pin the fixture's package.json engines.node to this version
before_deploy: -> { set_node_version(version) }
)
}
it "should deploy successfully" do
app.deploy do |app|
expect(app.output).to include("Downloading and installing node #{version}...")
expect(successful_body(app).strip).to eq("Hello, world!")
end
end
end
end
end
require 'rspec/core'
require 'hatchet'
require 'fileutils'
# (a duplicate `require 'hatchet'` was removed here — require is
# idempotent, but the repetition was noise)
require 'rspec/retry'
require 'date'
require 'json'
require 'sem_version'

ENV['RACK_ENV'] = 'test'

# Shared RSpec configuration for the buildpack's Hatchet integration suite.
RSpec.configure do |config|
# on CI run everything; locally allow focusing specs with `fit`
config.filter_run focused: true unless ENV['IS_RUNNING_ON_TRAVIS']
config.run_all_when_everything_filtered = true
config.alias_example_to :fit, focused: true
config.full_backtrace = true
config.verbose_retry = true # show retry status in spec process
config.default_retry_count = 2 if ENV['IS_RUNNING_ON_TRAVIS'] # retry all tests that fail again
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
# GET the deployed app over HTTP and return the response body, retrying
# (default 100 attempts) until the app answers with a 200.
def successful_body(app, options = {})
retry_limit = options[:retry_limit] || 100
path = options[:path] ? "/#{options[:path]}" : ''
Excon.get("http://#{app.name}.herokuapp.com#{path}", :idempotent => true, :expects => 200, :retry_limit => retry_limit).body
end
# Fetch the deployed app's response body and decode it as JSON.
def successful_json_body(app, options = {})
  JSON.parse(successful_body(app, options))
end
# Rewrite package.json in the current working directory so that
# engines.node pins the given version requirement.
def set_node_version(version)
  package = JSON.parse(File.read('package.json'))
  package['engines']['node'] = version
  File.open('package.json', 'w') { |f| f.puts JSON.dump(package) }
end
# Resolve each version requirement (e.g. "8.x") to the latest matching
# concrete Node version, via the nodebin service. Returns an array of
# version strings in the same order as the requirements.
def resolve_node_version(requirements, options = {})
# use nodebin to get latest node versions
requirements.map do |requirement|
retry_limit = options[:retry_limit] || 50
body = Excon.get("https://nodebin.herokai.com/v1/node/linux-x64/latest?range=#{requirement}", :idempotent => true, :expects => 200, :retry_limit => retry_limit).body
JSON.parse(body)['number']
end
end
# List every Node version nodebin knows about that is >= 6.0.0
# (the oldest line this buildpack's tests cover).
def resolve_all_supported_node_versions(options = {})
retry_limit = options[:retry_limit] || 50
body = Excon.get("https://nodebin.herokai.com/v1/node/linux-x64/", :idempotent => true, :expects => 200, :retry_limit => retry_limit).body
list = JSON.parse(body).map { |n| n['number'] }
list.select do |n|
SemVersion.new(n).satisfies?('>= 6.0.0')
end
end
# Metrics specs only run for Node versions >= 8.0.0.
def version_supports_metrics(version)
SemVersion.new(version).satisfies?('>= 8.0.0')
end
# Decide which Node versions this run should exercise:
# - TEST_NODE_VERSION pins a single version,
# - TEST_ALL_NODE_VERSIONS=true tests every supported version,
# - otherwise the latest of each maintained line is resolved.
def get_test_versions
  versions =
    if ENV['TEST_NODE_VERSION']
      [ENV['TEST_NODE_VERSION']]
    elsif ENV['TEST_ALL_NODE_VERSIONS'] == 'true'
      resolve_all_supported_node_versions()
    else
      resolve_node_version(['6.x', '8.x', '9.x', '10.x'])
    end
  puts("Running tests for Node versions: #{versions.join(', ')}")
  versions
end
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"dependencies": {
"hashish": "*"
},
"engines": {
"node": "10.x"
},
"scripts": {
"start": "node foo.js"
}
}
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"dependencies": {
"hashish": "*"
},
"engines": {
"node": "6.x"
},
"scripts": {
"start": "node foo.js"
}
}
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"dependencies": {
"hashish": "*"
},
"engines": {
"node": "8.x"
},
"scripts": {
"start": "node foo.js"
}
}
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"dependencies": {
"hashish": "*"
},
"engines": {
"node": "9.x"
},
"scripts": {
"start": "node foo.js"
}
}
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"dependencies": {
"hashish": "*"
},
"engines": {
"node": "8.x"
},
"scripts": {
"start": "node foo.js",
"heroku-postbuild": "echo NODE_OPTIONS=$NODE_OPTIONS"
}
}
...@@ -3,6 +3,9 @@ ...@@ -3,6 +3,9 @@
"version": "1.0.0", "version": "1.0.0",
"main": "index.js", "main": "index.js",
"license": "MIT", "license": "MIT",
"engines": {
"yarn": "1.x"
},
"dependencies": { "dependencies": {
"lodash": "^4.16.4" "lodash": "^4.16.4"
} }
......
#!/usr/bin/env bash #!/usr/bin/env bash
source $(pwd)/lib/environment.sh source "$(pwd)"/lib/environment.sh
mktmpdir() { mktmpdir() {
local dir=$(mktemp -t testXXXXX) local dir=$(mktemp -t testXXXXX)
...@@ -18,7 +18,7 @@ compile() { ...@@ -18,7 +18,7 @@ compile() {
echo "Compiling $fixture" echo "Compiling $fixture"
echo "in $build_dir" echo "in $build_dir"
echo "(caching in $cache_dir)" echo "(caching in $cache_dir)"
cp -a $(pwd)/* ${bp_dir} cp -a "$(pwd)"/* ${bp_dir}
cp -a ${bp_dir}/test/fixtures/$fixture/. ${build_dir} cp -a ${bp_dir}/test/fixtures/$fixture/. ${build_dir}
"$bp_dir/bin/compile" "$build_dir" "$cache_dir" "$bp_dir/bin/compile" "$build_dir" "$cache_dir"
} }
...@@ -32,7 +32,7 @@ compileTest() { ...@@ -32,7 +32,7 @@ compileTest() {
echo "Compiling $fixture" echo "Compiling $fixture"
echo "in $build_dir" echo "in $build_dir"
echo "(caching in $cache_dir)" echo "(caching in $cache_dir)"
cp -a $(pwd)/* ${bp_dir} cp -a "$(pwd)"/* ${bp_dir}
cp -a ${bp_dir}/test/fixtures/$fixture/. ${build_dir} cp -a ${bp_dir}/test/fixtures/$fixture/. ${build_dir}
"$bp_dir/bin/test-compile" "$build_dir" "$cache_dir" "$bp_dir/bin/test-compile" "$build_dir" "$cache_dir"
......
This diff is collapsed.
#!/usr/bin/env bash
# Unit tests for the buildpack's bash helper libraries; executed by the
# shunit2 framework sourced at the bottom of this file.
# testing monitor_memory_usage
# allocate ~14 mb of memory and wait a bit
use_memory() {
# build ten large zero-padded number strings and park them in shell
# variables so the process's memory use grows measurably
for index in $(seq 10); do
value=$(seq -w -s '' $index $(($index + 100000)))
eval array$index=$value
done
# give the memory monitor time to sample the grown process
sleep 0.5
}
# print each argument to a separate line on stdout
print_args() {
  while (( "$#" )); do
    # Quote "$1" so glob characters and runs of whitespace inside an
    # argument are printed verbatim instead of expanded/collapsed.
    echo "$1"
    shift
  done
}
# Exercise monitor_memory_usage (lib/monitor.sh): per the assertions
# below it runs a command, writes the command's memory use (in MB) to
# the given file, and passes the command's stdout through unchanged.
testMonitorMemory() {
local mem_output=$(mktemp)
local stdout_capture=$(mktemp)
monitor_memory_usage $mem_output echo "this is a test" > /dev/null
assertTrue "should use less than 2mb" "[[ $(cat $mem_output) -lt 2 ]]"
monitor_memory_usage $mem_output use_memory
assertTrue "should use more than 10mb" "[[ $(cat $mem_output) -gt 10 ]]"
# flag-style arguments with embedded spaces must survive intact
monitor_memory_usage $mem_output print_args --foo --bar="baz lol hi" > $stdout_capture
assertTrue "should use less than 2mb" "[[ $(cat $mem_output) -lt 2 ]]"
assertTrue "should output 2 lines" "[[ $(wc -l < $stdout_capture) -eq 2 ]]"
assertEquals "first line" "--foo" "$(head -n 1 $stdout_capture)"
assertEquals "second line" "--bar=baz lol hi" "$(tail -n 1 $stdout_capture)"
}
# Exercise output (lib/output.sh), which filters build output to a log
# file while echoing it; whitespace and backslashes must pass through.
testOutput() {
local stdout
stdout=$(echo ' Indented line' | output /dev/null)
assertEquals 'should preserve leading whitespace' ' Indented line' "${stdout}"
stdout=$(echo 'Foo \ bar' | output /dev/null)
# NOTE(review): the expected value begins with added spaces — presumably
# `output` indents each line; confirm against lib/output.sh.
assertEquals 'should preserve unescaped backslashes' ' Foo \ bar' "${stdout}"
}
# Exercise the kv_* helpers (lib/kvstore.sh): a flat-file key/value
# store with create/set/get/keys/list/clear operations.
testKeyValue() {
local store=$(mktemp)
kv_create $store
kv_set $store key value
kv_set $store foo bar
# setting an existing key overwrites its previous value
kv_set $store key other_value
kv_set $store bar baz
assertEquals "other_value" "$(kv_get $store key)"
assertEquals "bar" "$(kv_get $store foo)"
assertEquals "baz" "$(kv_get $store bar)"
# if the key isn't there it should return an empty string
assertEquals "" "$(kv_get $store not_there)"
# kv_keys returns each key on a new line
assertEquals "$(printf "%s\n" bar foo key)" "$(kv_keys $store)"
# kv_list returns key=value on individual lines
assertEquals "$(printf "%s\n" bar=baz foo=bar key=other_value)" "$(kv_list $store)"
# calling create on an existing store doesn't erase it
kv_create $store
assertEquals "$(printf "%s\n" bar=baz foo=bar key=other_value)" "$(kv_list $store)"
# now clear the store
kv_clear $store
assertEquals "" "$(kv_get $store key)"
assertEquals "" "$(kv_keys $store)"
assertEquals "" "$(kv_list $store)"
}
# if the file doesn't exist, everything should be a no-op
# (covers empty, nonexistent, and whitespace-only store paths)
testKeyValueNoFile() {
# empty file argument
local empty=""
kv_set $empty key value
assertEquals "$(kv_get $empty key)" ""
assertEquals "$(kv_keys $empty)" ""
assertEquals "$(kv_list $empty)" ""
local store="/tmp/does-not-exist"
kv_set $store key value
assertEquals "" "$(kv_get $store key)"
assertEquals "" "$(kv_keys $store)"
assertEquals "" "$(kv_list $store)"
# running these commands has not created this file
assertTrue "[[ ! -e $store ]]"
local space=" "
kv_set $space key value
assertEquals "$(kv_get $space key)" ""
assertEquals "$(kv_keys $space)" ""
assertEquals "$(kv_list $space)" ""
}
# the modules to be tested
source "$(pwd)"/lib/monitor.sh
source "$(pwd)"/lib/output.sh
source "$(pwd)"/lib/kvstore.sh
# import the testing framework
source "$(pwd)"/test/shunit2
...@@ -36,7 +36,7 @@ capture() ...@@ -36,7 +36,7 @@ capture()
LAST_COMMAND="$@" LAST_COMMAND="$@"
$@ >${STD_OUT} 2>${STD_ERR} "$@" >${STD_OUT} 2>${STD_ERR}
RETURN=$? RETURN=$?
rtrn=${RETURN} # deprecated rtrn=${RETURN} # deprecated
} }
...@@ -186,5 +186,26 @@ assertFileMD5() ...@@ -186,5 +186,26 @@ assertFileMD5()
fail "no suitable MD5 hashing command found on this system" fail "no suitable MD5 hashing command found on this system"
fi fi
assertEquals "${expected_md5_cmd_output}" "`${md5_cmd}`" assertEquals "${expected_md5_cmd_output}" "$(${md5_cmd})"
}
# Fail the current test unless the given path exists and is a directory.
assertDirectoryExists() {
  local path="$1"
  if [[ ! -e "$path" ]]; then
    fail "$path does not exist"
  fi
  if [[ ! -d "$path" ]]; then
    fail "$path is not a directory"
  fi
}
# Fail the current test unless the given file exists.
assertFileExists() {
  filename=$1
  assertTrue "$filename doesn't exist" "[[ -e $filename ]]"
}
# Fail the current test if the given file exists.
# (The closing line previously carried side-by-side diff residue — a
# doubled "}" — restored here to a single brace.)
assertFileDoesNotExist() {
  filename=$1
  assertTrue "$filename exists" "[[ ! -e $filename ]]"
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment