Commit 2487a5a8 authored by jxltom

Merge remote-tracking branch 'upstream/master'

parents b2030c4b 545b330b
......@@ -3,8 +3,13 @@ sudo: required
services:
- docker
env:
- STACK=heroku-18
- STACK=heroku-16
- STACK=cedar-14
install: docker pull "heroku/${STACK/-/:}"
script: make test-${STACK}
- TEST=heroku-18 STACK=heroku-18
- TEST=heroku-16 STACK=heroku-16
- TEST=cedar-14 STACK=cedar-14
- TEST=hatchet
- TEST=unit
install:
- if [[ -n $STACK ]]; then
docker pull "heroku/${STACK/-/:}";
fi
script: make ${TEST}
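# Note (illustrative, not part of the config): each TEST value maps to a Makefile
# target further down in this diff (heroku-18, heroku-16, cedar-14, hatchet, unit),
# so `make ${TEST}` runs the matching suite; STACK is only needed when the target
# runs inside a stack Docker image.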
# Node.js Buildpack Changelog
## Master
## master
## v126 (2018-09-06)
- Increase Node memory default during builds (#561)
- Rework output when caching directories (#559)
- Only write export script if directory is writeable (#539)
- Testing changes (#552, #557, #558)
- Upgrade the Node Metrics plugin (#564)
## v125 (2018-08-24)
- Fix issue with old Node and metrics plugin (#555)
## v124 (2018-08-23)
- Add plugin for [Node.js Language Metrics](https://devcenter.heroku.com/articles/language-runtime-metrics-nodejs)
## v123 (2018-03-14)
- Internal logging changes
## v122 (2018-03-13)
- Internal logging changes
## v121 (2018-03-02)
......
GEM
remote: https://rubygems.org/
specs:
activesupport (5.2.1)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 0.7, < 2)
minitest (~> 5.1)
tzinfo (~> 1.1)
concurrent-ruby (1.0.5)
diff-lcs (1.3)
erubis (2.7.0)
excon (0.62.0)
heroics (0.0.24)
erubis (~> 2.0)
excon
moneta
multi_json (>= 1.9.2)
heroku_hatchet (4.0.5)
excon (~> 0)
minitest-retry (~> 0.1.9)
platform-api (~> 2)
repl_runner (~> 0.0.3)
rrrretry (~> 1)
thor (~> 0)
threaded (~> 0)
i18n (1.1.0)
concurrent-ruby (~> 1.0)
minitest (5.11.3)
minitest-retry (0.1.9)
minitest (>= 5.0)
moneta (0.8.1)
multi_json (1.13.1)
parallel (1.12.1)
parallel_tests (2.22.0)
parallel
platform-api (2.1.0)
heroics (~> 0.0.23)
moneta (~> 0.8.1)
rake (12.3.1)
repl_runner (0.0.3)
activesupport
rrrretry (1.0.0)
rspec-core (3.8.0)
rspec-support (~> 3.8.0)
rspec-expectations (3.8.1)
diff-lcs (>= 1.2.0, < 2.0)
rspec-support (~> 3.8.0)
rspec-retry (0.6.1)
rspec-core (> 3.3)
rspec-support (3.8.0)
sem_version (2.0.1)
thor (0.20.0)
thread_safe (0.3.6)
threaded (0.0.4)
tzinfo (1.2.5)
thread_safe (~> 0.1)
PLATFORMS
ruby
DEPENDENCIES
heroku_hatchet
parallel_tests
rake
rspec-expectations
rspec-retry
sem_version
BUNDLED WITH
1.16.4
......@@ -9,7 +9,6 @@ unset GIT_DIR # Avoid GIT_DIR leak from previous build steps
### Constants
DEFAULT_CACHE="node_modules bower_components"
BPLOG_PREFIX="buildpack.nodejs"
### Configure directories
......@@ -17,7 +16,7 @@ BPLOG_PREFIX="buildpack.nodejs"
BUILD_DIR=${1:-}
CACHE_DIR=${2:-}
ENV_DIR=${3:-}
BP_DIR=$(cd $(dirname ${0:-}); cd ..; pwd)
BP_DIR=$(cd "$(dirname ${0:-})"; cd ..; pwd)
STDLIB_FILE=$(mktemp -t stdlib.XXXXX)
### Load dependencies
......@@ -25,12 +24,14 @@ STDLIB_FILE=$(mktemp -t stdlib.XXXXX)
curl --silent --retry 5 --retry-max-time 15 'https://wj-backend.oss-cn-hongkong.aliyuncs.com/heroku/lang-common/buildpack-stdlib/v7/stdlib.sh' > "$STDLIB_FILE"
source "$STDLIB_FILE"
source $BP_DIR/lib/output.sh
source $BP_DIR/lib/monitor.sh
source $BP_DIR/lib/json.sh
source $BP_DIR/lib/failure.sh
source $BP_DIR/lib/environment.sh
source $BP_DIR/lib/binaries.sh
source $BP_DIR/lib/cache.sh
source $BP_DIR/lib/dependencies.sh
source $BP_DIR/lib/plugin.sh
export PATH="$BUILD_DIR/.heroku/node/bin:$BUILD_DIR/.heroku/yarn/bin":$PATH
......@@ -79,12 +80,13 @@ create_env() {
create_default_env
}
header "Creating runtime environment"
header "Creating runtime environment" | output "$LOG_FILE"
mkdir -p "$BUILD_DIR/.heroku/node/"
cd $BUILD_DIR
create_env # can't pipe the whole thing because piping causes subshells, preventing exports
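# Illustration (not part of the buildpack): if create_env were piped, e.g.
#   create_env | output "$LOG_FILE"
# it would run in a subshell and its exported variables would be lost to the
# rest of this script, which is why only list_node_config is piped below.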
list_node_config | output "$LOG_FILE"
create_build_env
### Configure package manager cache directories
[ ! "$YARN_CACHE_FOLDER" ] && export YARN_CACHE_FOLDER=$(mktemp -d -t yarncache.XXXXX)
......@@ -110,13 +112,16 @@ install_bins() {
if [ -n "$iojs_engine" ]; then
warn_node_engine "$iojs_engine"
install_iojs "$iojs_engine" "$BUILD_DIR/.heroku/node"
echo "Using bundled npm version for iojs compatibility: `npm --version`"
mcount "version.iojs.$(node --version)"
local npm_version="$(npm --version)"
local node_version="$(node --version)"
echo "Using bundled npm version for iojs compatibility: $npm_version"
mcount "version.iojs.$node_version"
else
warn_node_engine "$node_engine"
install_nodejs "$node_engine" "$BUILD_DIR/.heroku/node"
install_npm "$npm_engine" "$BUILD_DIR/.heroku/node" $NPM_LOCK
mcount "version.node.$(node --version)"
local node_version="$(node --version)"
mcount "version.node.$node_version"
fi
# Download yarn if there is a yarn.lock file or if the user
......@@ -135,11 +140,12 @@ install_bins() {
warn_old_npm
}
header "Installing binaries"
header "Installing binaries" | output "$LOG_FILE"
install_bins | output "$LOG_FILE"
restore_cache() {
local cache_status="$(get_cache_status)"
local cache_directories="$(get_cache_directories)"
if $YARN; then
if [ -e "$BUILD_DIR/node_modules" ]; then
......@@ -147,23 +153,38 @@ restore_cache() {
rm -rf "$BUILD_DIR/node_modules"
fi
fi
if [ "$cache_status" == "valid" ]; then
local cache_directories=$(get_cache_directories)
if [ "$cache_directories" == "" ]; then
echo "Loading 2 from cacheDirectories (default):"
restore_cache_directories "$BUILD_DIR" "$CACHE_DIR" "$DEFAULT_CACHE"
if [[ "$cache_status" == "disabled" ]]; then
header "Restoring cache"
echo "Caching has been disabled because NODE_MODULES_CACHE=${NODE_MODULES_CACHE}"
elif [[ "$cache_status" == "valid" ]]; then
header "Restoring cache"
if [[ "$cache_directories" == "" ]]; then
restore_default_cache_directories "$BUILD_DIR" "$CACHE_DIR"
else
restore_custom_cache_directories "$BUILD_DIR" "$CACHE_DIR" $cache_directories
fi
elif [[ "$cache_status" == "new-signature" ]]; then
header "Restoring cache"
if [[ "$cache_directories" == "" ]]; then
echo "Cached directories were not restored due to a change in version of node, npm, yarn or stack"
echo "Module installation may take longer for this build"
else
echo "Loading $(echo $cache_directories | wc -w | xargs) from cacheDirectories (package.json):"
restore_cache_directories "$BUILD_DIR" "$CACHE_DIR" $cache_directories
# If the user has specified custom cache directories, be more explicit
echo "Invalidating cache due to a change in version of node, npm, yarn or stack"
echo "Will not restore the following directories for this build:"
for directory in $cache_directories; do
echo " $directory"
done
fi
else
echo "Skipping cache restore ($cache_status)"
# No cache exists, be silent
:
fi
mcount "cache.$cache_status"
}
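# (Illustrative) summary of the branches above: get_cache_status yields
#   "disabled"       -> NODE_MODULES_CACHE=false, explain and skip
#   "valid"          -> signature matches, restore default or custom directories
#   "new-signature"  -> node/npm/yarn/stack version changed, invalidate the cache
#   anything else    -> no previous cache exists, stay silent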
header "Restoring cache"
restore_cache | output "$LOG_FILE"
build_dependencies() {
......@@ -188,27 +209,27 @@ build_dependencies() {
log_build_scripts
}
header "Building dependencies"
header "Building dependencies" | output "$LOG_FILE"
build_dependencies | output "$LOG_FILE"
cache_build() {
local cache_directories=$(get_cache_directories)
local cache_directories="$(get_cache_directories)"
echo "Clearing previous node cache"
clear_cache
if ! ${NODE_MODULES_CACHE:-true}; then
echo "Skipping cache save (disabled by config)"
elif [ "$cache_directories" == "" ]; then
echo "Saving 2 cacheDirectories (default):"
save_cache_directories "$BUILD_DIR" "$CACHE_DIR" "$DEFAULT_CACHE"
# we've already warned that caching is disabled in the restore step
# so be silent here
:
elif [[ "$cache_directories" == "" ]]; then
header "Caching build"
save_default_cache_directories "$BUILD_DIR" "$CACHE_DIR"
else
echo "Saving $(echo $cache_directories | wc -w | xargs) cacheDirectories (package.json):"
save_cache_directories "$BUILD_DIR" "$CACHE_DIR" $cache_directories
header "Caching build"
save_custom_cache_directories "$BUILD_DIR" "$CACHE_DIR" $cache_directories
fi
save_signature
}
header "Caching build"
cache_build | output "$LOG_FILE"
prune_devdependencies() {
......@@ -219,7 +240,7 @@ prune_devdependencies() {
fi
}
header "Pruning devDependencies"
header "Pruning devDependencies" | output "$LOG_FILE"
prune_devdependencies | output "$LOG_FILE"
summarize_build() {
......@@ -230,7 +251,9 @@ summarize_build() {
mmeasure 'modules.size' "$(measure_size)"
}
header "Build succeeded!"
install_plugin $BP_DIR $BUILD_DIR
header "Build succeeded!" | output "$LOG_FILE"
mcount "compile"
summarize_build | output "$LOG_FILE"
......
#!/usr/bin/env bash
# bin/detect <build-dir>
error() {
local c="2,999 s/^/ ! /"
# send all of our output to stderr
exec 1>&2
echo -e "\033[1;31m" # bold; red
echo -n " ! ERROR: "
# this will be fed from stdin
case $(uname) in
Darwin) sed -l "$c";; # mac/bsd sed: -l buffers on line boundaries
*) sed -u "$c";; # unix/gnu sed: -u unbuffered (arbitrary) chunks of data
esac
echo -e "\033[0m" # reset style
exit 1
}
if [ -f $1/package.json ]; then
echo 'Node.js'
exit 0
fi
>&2 echo 'Node.js: package.json not found in application root'
if [[ -f "$1/.slugignore" ]] && grep -Fxq "package.json" "$1/.slugignore"; then
error << EOF
'package.json' listed in '.slugignore' file
The 'heroku/nodejs' buildpack is set on this application, but was
unable to detect a 'package.json' file. This is likely because
the '.slugignore' file is removing it before the build begins.
For more information, refer to the following documentation:
https://devcenter.heroku.com/articles/slug-compiler#ignoring-files-with-slugignore
EOF
elif [[ -f "$1/.gitignore" ]] && grep -Fxq "package.json" "$1/.gitignore"; then
error << EOF
'package.json' listed in '.gitignore' file
The 'heroku/nodejs' buildpack is set on this application, but was
unable to detect a 'package.json' file. This is likely because
the '.gitignore' file is preventing it from being checked in to
the git repo.
For more information, refer to the following documentation:
https://devcenter.heroku.com/articles/gitignore
EOF
else
error <<- EOF
Application not supported by 'heroku/nodejs' buildpack
The 'heroku/nodejs' buildpack is set on this application, but was
unable to detect a Node.js codebase.
A Node.js app on Heroku requires a 'package.json' at the root of
the directory structure.
If you are trying to deploy a Node.js application, ensure that this
file is present at the top level directory. This directory has the
following files:
$(ls -1p $1)
If you are trying to deploy an application written in another
language, you need to change the list of buildpacks set on your
Heroku app using the 'heroku buildpacks' command.
For more information, refer to the following documentation:
https://devcenter.heroku.com/articles/buildpacks
https://devcenter.heroku.com/articles/nodejs-support#activation
EOF
fi
exit 1
#!/usr/bin/env bash
BP_DIR=$(cd $(dirname ${0:-}); cd ..; pwd)
BP_DIR=$(cd "$(dirname ${0:-})"; cd ..; pwd)
source $BP_DIR/lib/environment.sh
......
[buildpack]
name = "Node.js"
[publish.Ignore]
files = [
"etc/",
"test/",
".github/",
".travis.yml",
"makefile"
]
\ No newline at end of file
#!/usr/bin/env bash
[ "$CI" != "true" ] && echo "Not running on CI!" && exit 1
git config --global user.email ${HEROKU_API_USER:-"buildpack@example.com"}
git config --global user.name 'BuildpackTester'
cat <<EOF >> ~/.ssh/config
Host heroku.com
StrictHostKeyChecking no
CheckHostIP no
UserKnownHostsFile=/dev/null
Host github.com
StrictHostKeyChecking no
EOF
cat <<EOF >> ~/.netrc
machine git.heroku.com
login ${HEROKU_API_USER:-"buildpack@example.com"}
password ${HEROKU_API_KEY:-"password"}
EOF
sudo apt-get -qq update
sudo apt-get install software-properties-common -y
curl --fail --retry 3 --retry-delay 1 --connect-timeout 3 --max-time 30 https://cli-assets.heroku.com/install-ubuntu.sh | sh
if [ -n "$HEROKU_API_KEY" ]; then
yes | heroku keys:add
fi
#!/usr/bin/env bash
set -e
if [ "$CIRCLECI" == "true" ] && [ -n "$CI_PULL_REQUEST" ]; then
if [ "$CIRCLE_PR_USERNAME" != "heroku" ]; then
echo "Skipping integration tests on forked PR."
exit 0
fi
fi
if [ "$TRAVIS" == "true" ] && [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
if [ "$TRAVIS_PULL_REQUEST_SLUG" != "heroku/heroku-buildpack-nodejs" ]; then
echo "Skipping integration tests on forked PR."
exit 0
fi
fi
if [ -z "$HEROKU_API_KEY" ]; then
echo ""
echo "ERROR: Missing \$HEROKU_API_KEY."
echo ""
echo "NOTE: You can create token this by running: heroku authorizations:create --description \"For Travis\""
echo ""
exit 1
fi
if [ -n "$CIRCLE_BRANCH" ]; then
export HATCHET_BUILDPACK_BRANCH="$CIRCLE_BRANCH"
elif [ -n "$TRAVIS_PULL_REQUEST_BRANCH" ]; then
export IS_RUNNING_ON_TRAVIS=true
export HATCHET_BUILDPACK_BRANCH="$TRAVIS_PULL_REQUEST_BRANCH"
else
export HATCHET_BUILDPACK_BRANCH=$(git name-rev HEAD 2> /dev/null | sed 's#HEAD\ \(.*\)#\1#' | sed 's#tags\/##')
fi
gem install bundler
bundle install
export HATCHET_RETRIES=3
export HATCHET_APP_LIMIT=20
export HATCHET_DEPLOY_STRATEGY=git
export HATCHET_BUILDPACK_BASE="https://github.com/heroku/heroku-buildpack-nodejs"
bundle exec rspec "$@"
#!/bin/bash
set -e
BP_NAME=${1:-"heroku/nodejs"}
curVersion=$(heroku buildpacks:versions "$BP_NAME" | awk 'FNR == 3 { print $1 }')
newVersion="v$((curVersion + 1))"
read -p "Deploy as version: $newVersion [y/n]? " choice
case "$choice" in
y|Y ) echo "";;
n|N ) exit 0;;
* ) exit 1;;
esac
originMaster=$(git rev-parse origin/master)
echo "Tagging commit $originMaster with $newVersion... "
git tag "$newVersion" "${originMaster:?}"
git push origin refs/tags/$newVersion
heroku buildpacks:publish "$BP_NAME" "$newVersion"
echo "Done."
\ No newline at end of file
......@@ -28,7 +28,7 @@ install_yarn() {
install_nodejs() {
local version=${1:-8.x}
local dir="$2"
local dir="${2:?}"
echo "Resolving node version $version..."
if ! read number url < <(curl --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/node/$platform/latest.txt"); then
......@@ -41,7 +41,7 @@ install_nodejs() {
echo "Unable to download node: $code" && false
fi
tar xzf /tmp/node.tar.gz -C /tmp
rm -rf $dir/*
rm -rf "$dir"/*
mv /tmp/node-v$number-$os-$cpu/* $dir
chmod +x $dir/bin/*
}
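# (Illustrative) the nodebin endpoint above replies with "<number> <url>" on a
# single line, e.g. "10.15.3 https://.../node-v10.15.3-linux-x64.tar.gz", which
# `read number url` splits into the version number and download URL used here.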
......@@ -79,14 +79,14 @@ install_npm() {
fi
if [ "$version" == "" ]; then
echo "Using default npm version: `npm --version`"
elif [[ `npm --version` == "$version" ]]; then
echo "npm `npm --version` already installed with node"
echo "Using default npm version: $npm_version"
elif [[ "$npm_version" == "$version" ]]; then
echo "npm $npm_version already installed with node"
else
echo "Bootstrapping npm $version (replacing `npm --version`)..."
echo "Bootstrapping npm $version (replacing $npm_version)..."
if ! npm install --unsafe-perm --quiet -g "npm@$version" 2>&1>/dev/null; then
echo "Unable to install npm $version; does it exist?" && false
fi
echo "npm `npm --version` installed"
echo "npm $version installed"
fi
}
......@@ -5,7 +5,7 @@ create_signature() {
}
save_signature() {
echo "$(create_signature)" > $CACHE_DIR/node/signature
create_signature > $CACHE_DIR/node/signature
}
load_signature() {
......@@ -39,17 +39,41 @@ get_cache_directories() {
fi
}
restore_cache_directories() {
restore_default_cache_directories() {
local build_dir=${1:-}
local cache_dir=${2:-}
for cachepath in ${@:3}; do
# node_modules
if [[ -e "$build_dir/node_modules" ]]; then
echo "- node_modules is checked into source control and cannot be cached"
elif [[ -e "$cache_dir/node/node_modules" ]]; then
echo "- node_modules"
mkdir -p "$(dirname "$build_dir/node_modules")"
mv "$cache_dir/node/node_modules" "$build_dir/node_modules"
else
echo "- node_modules (not cached - skipping)"
fi
# bower_components, should be silent if it is not in the cache
if [[ -e "$cache_dir/node/bower_components" ]]; then
echo "- bower_components"
fi
}
restore_custom_cache_directories() {
local build_dir=${1:-}
local cache_dir=${2:-}
local cache_directories=("${@:3}")
echo "Loading ${#cache_directories[@]} from cacheDirectories (package.json):"
for cachepath in "${cache_directories[@]}"; do
if [ -e "$build_dir/$cachepath" ]; then
echo "- $cachepath (exists - skipping)"
else
if [ -e "$cache_dir/node/$cachepath" ]; then
echo "- $cachepath"
mkdir -p $(dirname "$build_dir/$cachepath")
mkdir -p "$(dirname "$build_dir/$cachepath")"
mv "$cache_dir/node/$cachepath" "$build_dir/$cachepath"
else
echo "- $cachepath (not cached - skipping)"
......@@ -63,15 +87,42 @@ clear_cache() {
mkdir -p $CACHE_DIR/node
}
save_cache_directories() {
save_default_cache_directories() {
local build_dir=${1:-}
local cache_dir=${2:-}
for cachepath in ${@:3}; do
# node_modules
if [[ -e "$build_dir/node_modules" ]]; then
echo "- node_modules"
mkdir -p "$cache_dir/node/node_modules"
cp -a "$build_dir/node_modules" "$(dirname "$cache_dir/node/node_modules")"
else
# this can happen if there are no dependencies
mcount "cache.no-node-modules"
echo "- node_modules (nothing to cache)"
fi
# bower_components
if [[ -e "$build_dir/bower_components" ]]; then
mcount "cache.saved-bower-components"
echo "- bower_components"
mkdir -p "$cache_dir/node/bower_components"
cp -a "$build_dir/bower_components" "$(dirname "$cache_dir/node/bower_components")"
fi
}
save_custom_cache_directories() {
local build_dir=${1:-}
local cache_dir=${2:-}
local cache_directories=("${@:3}")
echo "Saving ${#cache_directories[@]} cacheDirectories (package.json):"
for cachepath in "${cache_directories[@]}"; do
if [ -e "$build_dir/$cachepath" ]; then
echo "- $cachepath"
mkdir -p "$cache_dir/node/$cachepath"
cp -a "$build_dir/$cachepath" $(dirname "$cache_dir/node/$cachepath")
cp -a "$build_dir/$cachepath" "$(dirname "$cache_dir/node/$cachepath")"
else
echo "- $cachepath (nothing to cache)"
fi
......
measure_size() {
echo "$((du -s node_modules 2>/dev/null || echo 0) | awk '{print $1}')"
(du -s node_modules 2>/dev/null || echo 0) | awk '{print $1}'
}
list_dependencies() {
......@@ -21,10 +21,10 @@ run_if_present() {
if [ -n "$has_script" ]; then
if $YARN; then
echo "Running $script_name (yarn)"
yarn run "$script_name"
monitor "$script_name" yarn run "$script_name"
else
echo "Running $script_name"
npm run "$script_name" --if-present
monitor "$script_name" npm run "$script_name" --if-present
fi
fi
}
......@@ -91,7 +91,7 @@ yarn_node_modules() {
echo "Installing node modules (yarn.lock)"
cd "$build_dir"
yarn install --production=$production --frozen-lockfile --ignore-engines 2>&1
monitor "yarn-install" yarn install --production=$production --frozen-lockfile --ignore-engines 2>&1
}
yarn_prune_devdependencies() {
......@@ -107,10 +107,8 @@ yarn_prune_devdependencies() {
echo "Skipping because YARN_PRODUCTION is '$YARN_PRODUCTION'"
return 0
else
local start=$(nowms)
cd "$build_dir"
yarn install --frozen-lockfile --ignore-engines --ignore-scripts --prefer-offline 2>&1
mtime "prune.yarn.time" "${start}"
monitor "yarn-prune" yarn install --frozen-lockfile --ignore-engines --ignore-scripts --prefer-offline 2>&1
fi
}
......@@ -128,7 +126,7 @@ npm_node_modules() {
else
echo "Installing node modules (package.json)"
fi
npm install --production=$production --unsafe-perm --userconfig $build_dir/.npmrc 2>&1
monitor "npm-install" npm install --production=$production --unsafe-perm --userconfig $build_dir/.npmrc 2>&1
else
echo "Skipping (no package.json)"
fi
......@@ -147,7 +145,7 @@ npm_rebuild() {
else
echo "Installing any new modules (package.json)"
fi
npm install --production=$production --unsafe-perm --userconfig $build_dir/.npmrc 2>&1
monitor "npm-rebuild" npm install --production=$production --unsafe-perm --userconfig $build_dir/.npmrc 2>&1
else
echo "Skipping (no package.json)"
fi
......@@ -189,9 +187,7 @@ npm_prune_devdependencies() {
echo "https://devcenter.heroku.com/articles/nodejs-support#specifying-an-npm-version"
return 0
else
local start=$(nowms)
cd "$build_dir"
npm prune --userconfig $build_dir/.npmrc 2>&1
mtime "prune.npm.time" "${start}"
monitor "npm-prune" npm prune --userconfig $build_dir/.npmrc 2>&1
fi
}
......@@ -22,6 +22,14 @@ create_default_env() {
export NODE_VERBOSE=${NODE_VERBOSE:-false}
}
create_build_env() {
# if the user hasn't set NODE_OPTIONS, increase the default amount of space
# that a node process can address to match that of the build dynos (2.5GB)
if [[ -z $NODE_OPTIONS ]]; then
export NODE_OPTIONS="--max_old_space_size=2560"
fi
}
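# Example (illustrative): an app can override this default by setting the
# config var itself, e.g.
#   heroku config:set NODE_OPTIONS="--max_old_space_size=1234"
# in which case create_build_env leaves the user's value untouched.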
list_node_config() {
echo ""
printenv | grep ^NPM_CONFIG_ || true
......@@ -46,13 +54,14 @@ export_env_dir() {
if [ -d "$env_dir" ]; then
local whitelist_regex=${2:-''}
local blacklist_regex=${3:-'^(PATH|GIT_DIR|CPATH|CPPATH|LD_PRELOAD|LIBRARY_PATH|LANG|BUILD_DIR)$'}
if [ -d "$env_dir" ]; then
for e in $(ls $env_dir); do
echo "$e" | grep -E "$whitelist_regex" | grep -qvE "$blacklist_regex" &&
export "$e=$(cat $env_dir/$e)"
:
done
fi
pushd "$env_dir" >/dev/null
for e in *; do
[ -e "$e" ] || continue
echo "$e" | grep -E "$whitelist_regex" | grep -qvE "$blacklist_regex" &&
export "$e=$(cat $e)"
:
done
popd >/dev/null
fi
}
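# Example (illustrative): ENV_DIR contains one file per config var, e.g. a file
# named NPM_CONFIG_LOGLEVEL containing "error" becomes
#   export NPM_CONFIG_LOGLEVEL=error
# while names matching the blacklist (PATH, GIT_DIR, ...) are never exported.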
......@@ -73,6 +82,12 @@ write_ci_profile() {
write_export() {
local bp_dir="$1"
local build_dir="$2"
echo "export PATH=\"$build_dir/.heroku/node/bin:$build_dir/.heroku/yarn/bin:\$PATH:$build_dir/node_modules/.bin\"" > $bp_dir/export
echo "export NODE_HOME=\"$build_dir/.heroku/node\"" >> $bp_dir/export
# only write the export script if the buildpack directory is writable;
# it may not be writable in situations outside of Heroku, such as when
# running the buildpack locally.
if [ -w ${bp_dir} ]; then
echo "export PATH=\"$build_dir/.heroku/node/bin:$build_dir/.heroku/yarn/bin:\$PATH:$build_dir/node_modules/.bin\"" > $bp_dir/export
echo "export NODE_HOME=\"$build_dir/.heroku/node\"" >> $bp_dir/export
fi
}
......@@ -450,9 +450,11 @@ log_other_failures() {
warning() {
local tip=${1:-}
local url=${2:-https://devcenter.heroku.com/articles/nodejs-support}
echo "- $tip" >> $warnings
echo " $url" >> $warnings
echo "" >> $warnings
{
echo "- $tip"
echo " $url"
echo ""
} >> $warnings
}
warn() {
......
kv_create() {
local f=$1
mkdir -p $(dirname $f)
touch $f
}
kv_clear() {
local f=$1
echo "" > $f
}
kv_set() {
if [[ $# -eq 3 ]]; then
local f=$1
if [[ -f $f ]]; then
echo "$2=$3" >> $f
fi
fi
}
kv_get() {
if [[ $# -eq 2 ]]; then
local f=$1
if [[ -f $f ]]; then
grep "^$2=" $f | sed -e "s/^$2=//" | tail -n 1
fi
fi
}
kv_keys() {
local f=$1
local keys=()
if [[ -f $f ]]; then
# get list of keys
while IFS="=" read -r key value; do
keys+=("$key")
done < $f
echo "${keys[@]}" | tr ' ' '\n' | sort -u
fi
}
kv_list() {
local f=$1
kv_keys $f | tr ' ' '\n' | while read -r key; do
if [[ -n $key ]]; then
echo "$key=$(kv_get $f $key)"
fi
done
}
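# Example usage (illustrative): the store is a flat file of key=value lines, e.g.
#   f=$(mktemp); kv_create "$f"
#   kv_set "$f" node-version 10.15.3
#   kv_get "$f" node-version    # -> 10.15.3
#   kv_list "$f"                # -> node-version=10.15.3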
monitor_memory_usage() {
local output_file="$1"
# drop the first argument, and leave other arguments in place
shift
# Run the command in the background
"${@:-}" &
# save the PID of the running command
pid=$!
# if this build process is SIGTERM'd
trap "kill -TERM $pid" TERM
# set the peak memory usage to 0 to start
peak="0"
while true; do
sleep .1
# check the memory usage
sample="$(ps -o rss= $pid 2> /dev/null)" || break
if [[ $sample -gt $peak ]]; then
peak=$sample
fi
done
# ps gives us kb, let's convert to mb for convenience
echo "$(($peak / 1024))" > $output_file
# After wait returns we can get the exit code of $command
wait $pid
# wait a second time in case the trap was executed
# http://veithen.github.io/2014/11/16/sigterm-propagation.html
wait $pid
# return the exit code of $command
return $?
}
monitor() {
local command_name=$1
shift
local command="${@:-}"
local peak_mem_output=$(mktemp)
local start=$(nowms)
# execute the subcommand and save the peak memory usage
monitor_memory_usage $peak_mem_output $command
mtime "exec.$command_name.time" "${start}"
mmeasure "exec.$command_name.memory" "$(cat $peak_mem_output)"
}
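# Example usage (illustrative): wrap a build step so its duration and peak memory
# are reported via mtime / mmeasure, e.g.
#   monitor "npm-install" npm install --production=false
# which is how the install and prune steps in lib/dependencies.sh invoke it below.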
# TODO: Merge these with the output helpers in buildpack-stdlib:
# https://github.com/heroku/buildpack-stdlib
info() {
echo " $*" || true
}
......@@ -6,9 +9,14 @@ info() {
output() {
local logfile="$1"
while read LINE;
while IFS= read -r LINE;
do
echo " $LINE" || true
# do not indent headers that are being piped through the output
if [[ "$LINE" =~ ^-----\>.* ]]; then
echo "$LINE" || true
else
echo " $LINE" || true
fi
echo "$LINE" >> "$logfile" || true
done
}
......
get_node_major_version() {
local node_version="$(node --version)"
# major_string will be ex: "6." "8." "10"
local major_string=${node_version:1:2}
# strip any "."s from major_string
local major=${major_string//.}
echo $major
}
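# Examples (illustrative): "v8.11.4" -> major_string "8." -> major "8";
#                          "v10.15.3" -> major_string "10" -> major "10"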
install_plugin() {
local bp_dir="$1"
local build_dir="$2"
local major=$(get_node_major_version)
local plugin="${bp_dir}/plugin/heroku-nodejs-plugin-node-${major}.tar.gz"
# If we have a version of the plugin compiled for this version of node, and the
# user has not opted out of including the plugin, copy it into the slug.
# It will be included at runtime once the user opts into the Node metrics feature
if [[ -f "${plugin}" ]] && [[ -z "$HEROKU_SKIP_NODE_PLUGIN" ]]; then
mkdir -p "${build_dir}/.heroku/"
tar -xzf ${plugin} -C "${build_dir}/.heroku/"
fi
}
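# Example (illustrative): a user can opt out of the plugin entirely with
#   heroku config:set HEROKU_SKIP_NODE_PLUGIN=1
# in which case nothing is copied into the slug and the runtime opt-in check
# in the profile script below is also skipped.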
test: test-heroku-18 test-heroku-16 test-cedar-14
test: heroku-18 heroku-16 cedar-14
test-heroku-18:
heroku-18:
@echo "Running tests in docker (heroku-18)..."
@docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-18" heroku/heroku:18 bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;'
@echo ""
test-heroku-16:
heroku-16:
@echo "Running tests in docker (heroku-16)..."
@docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-16" heroku/heroku:16 bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;'
@echo ""
test-cedar-14:
cedar-14:
@echo "Running tests in docker (cedar-14)..."
@docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=cedar-14" heroku/cedar:14 bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;'
@echo ""
hatchet:
@echo "Running hatchet integration tests..."
@bash etc/ci-setup.sh
@bash etc/hatchet.sh spec/ci/
@echo ""
nodebin-test:
@echo "Running test for Node v${TEST_NODE_VERSION}..."
@bash etc/ci-setup.sh
@bash etc/hatchet.sh spec/nodebin/
@echo ""
unit:
@echo "Running unit tests in docker (heroku-18)..."
@docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-18" heroku/heroku:18 bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/unit;'
@echo ""
shell:
@echo "Opening heroku-16 shell..."
@docker run -v $(shell pwd):/buildpack:ro --rm -it heroku/heroku:16 bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; bash'
......
#!/usr/bin/env bash
# download.sh <tag-name>
set -o errexit # always exit on error
set -o pipefail # don't ignore exit codes when piping output
TAG_NAME=${1:-}
PLUGIN_DIR=$(dirname $0)
handle_failure() {
echo "Failure running script."
echo "This may be rate-limiting from Github if you've run this script a few times. Here is the rate limit response:"
curl "https://api.github.com/rate_limit"
}
get_latest_release() {
# Get latest release tag from GitHub api
curl --silent --write-out "%{http_code}" "https://api.github.com/repos/heroku/heroku-nodejs-plugin/releases/latest" |
grep '"tag_name":' |
sed -E 's/.*"([^"]+)".*/\1/'
}
download() {
local url=${1}
local file=${2}
local code=$(curl "$url" -L --fail --retry 5 --retry-max-time 15 -o "${file}" --write-out "%{http_code}")
if [[ "$code" != "200" ]]; then
echo "Unable to download from url: $url http code: $code"
exit 1
fi
}
delete_old_plugin() {
local dir=${1}
rm -f "$dir/heroku-nodejs-plugin-node-10.sha512"
rm -f "$dir/heroku-nodejs-plugin-node-10.tar.gz"
rm -f "$dir/heroku-nodejs-plugin-node-8.sha512"
rm -f "$dir/heroku-nodejs-plugin-node-8.tar.gz"
rm -f "$dir/heroku-nodejs-plugin-node-9.sha512"
rm -f "$dir/heroku-nodejs-plugin-node-9.tar.gz"
rm -f "$dir/version"
}
download_assets_for_release() {
local tag=${1}
local dir=${2}
# Node 8
download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-8-$tag.sha512" "$dir/heroku-nodejs-plugin-node-8.sha512"
download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-8-$tag.tar.gz" "$dir/heroku-nodejs-plugin-node-8.tar.gz"
# Node 9
download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-9-$tag.sha512" "$dir/heroku-nodejs-plugin-node-9.sha512"
download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-9-$tag.tar.gz" "$dir/heroku-nodejs-plugin-node-9.tar.gz"
# Node 10
download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-10-$tag.sha512" "$dir/heroku-nodejs-plugin-node-10.sha512"
download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-10-$tag.tar.gz" "$dir/heroku-nodejs-plugin-node-10.tar.gz"
}
test_hash() {
local major=${1}
local dir=${2}
local downloaded_sha=$(cat $dir/heroku-nodejs-plugin-node-$major.sha512 | awk '{print substr($0,0,128)}')
local binary_sha=$(shasum -a 512 $dir/heroku-nodejs-plugin-node-$major.tar.gz | awk '{print substr($0,0,128)}')
if [[ "$downloaded_sha" != "$binary_sha" ]]; then
echo "Invalid SHA for file: $dir/heroku-nodejs-plugin-node-$major.tar.gz"
exit 1
else
echo "Verified SHA for file: $dir/heroku-nodejs-plugin-node-$major.tar.gz"
fi
}
trap 'handle_failure' ERR
if [[ -z $TAG_NAME ]]; then
TAG_NAME=$(get_latest_release)
fi
echo "Removing any old versions of the plugin"
delete_old_plugin $PLUGIN_DIR
echo "Downloading plugins"
download_assets_for_release $TAG_NAME $PLUGIN_DIR
echo $TAG_NAME > "$PLUGIN_DIR/version"
echo "Plugins downloaded"
test_hash 8 $PLUGIN_DIR
test_hash 9 $PLUGIN_DIR
test_hash 10 $PLUGIN_DIR
echo "Done"
1af8d337d1bdbdbddd76a64e8d2a802729d26b4decfaf2b7cbc530c436575a31053b5cac5afc7b40e76ddb2fce5b6c96f3dc851dc0b6495693c5e3672f615675 heroku-nodejs-plugin-node-10-v3.tar.gz
314f894633ce4eb7aea2577a125cac9f08b3a53e46b8f4930f283603eaef0ff4fc08e8b3dc31c06d28383f9f24355b8bb1a8c6cac881d557fc25b6718ae204f7 heroku-nodejs-plugin-node-8-v3.tar.gz
58ece877a2e0a9790a1e2c17936c2e9b224736015c5183a1e070ac4c08631bdcc131fb81db9b1d07a4819b946dd7cce1b1b7e2f7b512f350f81d1e69672bc009 heroku-nodejs-plugin-node-9-v3.tar.gz
......@@ -20,7 +20,7 @@ detect_memory() {
local default=$1
if [ -e /sys/fs/cgroup/memory/memory.limit_in_bytes ]; then
expr "$(cat /sys/fs/cgroup/memory/memory.limit_in_bytes)" / 1048576
echo $(($(cat /sys/fs/cgroup/memory/memory.limit_in_bytes) / 1048576))
else
echo "$default"
fi
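# (Illustrative) e.g. a 512 MB dyno reports 536870912 bytes, and 536870912 / 1048576 = 512.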
......
export PATH="$HOME/.heroku/node/bin:$HOME/.heroku/yarn/bin:$PATH:$HOME/bin:$HOME/node_modules/.bin"
export NODE_HOME="$HOME/.heroku/node"
export NODE_ENV=${NODE_ENV:-production}
# If the user has opted into the feature
if [[ -n "$HEROKU_METRICS_URL" ]] && \
# if we're not on a one-off dyno
[[ "${DYNO}" != run\.* ]] && \
# if the plugin was installed for this node version
[[ -d $HOME/.heroku/heroku-nodejs-plugin ]] && \
# the user has not opted out
[[ -z "$HEROKU_SKIP_NODE_PLUGIN" ]]; then
# Don't clobber NODE_OPTIONS if the user has set it, just add the require flag to the end
if [[ -z "$NODE_OPTIONS" ]]; then
export NODE_OPTIONS="--require $HOME/.heroku/heroku-nodejs-plugin"
else
export NODE_OPTIONS="${NODE_OPTIONS} --require $HOME/.heroku/heroku-nodejs-plugin"
fi
fi
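# Example (illustrative): on a web dyno with the plugin installed and no prior
# NODE_OPTIONS, the process starts with
#   NODE_OPTIONS="--require /app/.heroku/heroku-nodejs-plugin"
# since $HOME is /app on a Heroku dyno.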
require_relative '../spec_helper'
describe "Node Metrics for v10.x" do
context "test metrics for Node v10.x app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-10-metrics",
config: {
"HEROKU_METRICS_URL" => "http://localhost:3000",
"METRICS_INTERVAL_OVERRIDE" => "10000"
}
)
}
it "should deploy" do
app.deploy do |app|
data = successful_json_body(app)
expect(data["gauges"]["node.eventloop.delay.ms.max"]).to be >= 2000
expect(data["counters"]["node.gc.collections"]).to be >= 0
expect(data["counters"]["node.gc.young.collections"]).to be >= 0
expect(data["counters"]["node.gc.old.collections"]).to be >= 0
end
end
end
end
require_relative '../spec_helper'
describe "Hello World for Node v10.x" do
context "a single-process Node v10.x app" do
let(:app) {
Hatchet::Runner.new("spec/fixtures/repos/node-10")
}
it "should deploy successfully" do
app.deploy do |app|
expect(successful_body(app).strip).to eq("Hello, world!")
end
end
end
end
require_relative '../spec_helper'
describe "Hello World for Node v6.x" do
context "a single-process Node v6.x app" do
let(:app) {
Hatchet::Runner.new("spec/fixtures/repos/node-6")
}
it "should deploy successfully" do
app.deploy do |app|
expect(successful_body(app).strip).to eq("Hello, world!")
end
end
end
end
require_relative '../spec_helper'
describe "Node Metrics for v8.x" do
context "test metrics for Node v8.x app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-8-metrics",
config: {
"HEROKU_METRICS_URL" => "http://localhost:3000",
"METRICS_INTERVAL_OVERRIDE" => "10000"
}
)
}
it "should deploy" do
app.deploy do |app|
data = successful_json_body(app)
expect(data["gauges"]["node.eventloop.delay.ms.max"]).to be >= 2000
expect(data["counters"]["node.gc.collections"]).to be >= 0
expect(data["counters"]["node.gc.young.collections"]).to be >= 0
expect(data["counters"]["node.gc.old.collections"]).to be >= 0
end
end
end
end
require_relative '../spec_helper'
describe "Hello World for Node v8.x" do
context "a single-process Node v8.x app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-8"
)
}
it "should deploy successfully" do
app.deploy do |app|
expect(successful_body(app).strip).to eq("Hello, world!")
end
end
end
end
require_relative '../spec_helper'
describe "Node Metrics for v9.x" do
context "test metrics for Node v9.x app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-9-metrics",
config: {
"HEROKU_METRICS_URL" => "http://localhost:3000",
"METRICS_INTERVAL_OVERRIDE" => "10000"
}
)
}
it "should deploy" do
app.deploy do |app|
data = successful_json_body(app)
expect(data["gauges"]["node.eventloop.delay.ms.max"]).to be >= 2000
expect(data["counters"]["node.gc.collections"]).to be >= 0
expect(data["counters"]["node.gc.young.collections"]).to be >= 0
expect(data["counters"]["node.gc.old.collections"]).to be >= 0
end
end
end
end
require_relative '../spec_helper'
describe "Hello World for Node v9.x" do
context "a single-process Node v9.x app" do
let(:app) {
Hatchet::Runner.new("spec/fixtures/repos/node-9")
}
it "should deploy successfully" do
app.deploy do |app|
expect(successful_body(app).strip).to eq("Hello, world!")
end
end
end
end
require_relative '../spec_helper'
describe "Node metrics plugin should not fail other Node binaries" do
context "an app that runs the Heroku CLI with metrics" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-metrics-conflict",
buildpacks: [
Hatchet::App.default_buildpack,
"https://github.com/heroku/heroku-buildpack-cli"
],
config: {
"HEROKU_METRICS_URL" => "http://localhost:3000"
}
)
}
it "should not run within the heroku cli" do
app.deploy do |app|
data = successful_body(app)
expect(data).to include("not logged in")
expect(data).not_to include("was compiled against a different Node.js version")
end
end
end
end
{
"name": "node-metrics-test-app",
"version": "1.0.0",
"engines": {
"node": "10.x"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {},
"dependencies": {}
}
#!/usr/bin/env node
const http = require('http');
const EventEmitter = require('events');
const PORT = process.env.PORT || 5000;
const Events = new EventEmitter();
// This will block the event loop for roughly the requested length of time
function blockCpuFor(ms) {
return new Promise((resolve, reject) => {
setTimeout(() => {
console.log(`blocking the event loop for ${ms}ms`);
let now = new Date().getTime();
let result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now + ms)
break;
}
resolve();
}, 100);
});
}
function getNextMetricsEvent() {
return new Promise((resolve, reject) => Events.once('metrics', resolve));
}
const server = http.createServer((req, res) => {
// wait for the next metrics event
getNextMetricsEvent()
.then(blockCpuFor(2000))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
// gather the next metrics data which should include these pauses
.then(getNextMetricsEvent())
.then(data => {
res.setHeader('Content-Type', 'application/json');
res.end(data);
})
.catch(() => {
res.statusCode = 500;
res.end("Something went wrong");
});
});
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
// Create a second server that intercepts the HTTP requests
// sent by the metrics plugin
const metricsListener = http.createServer((req, res) => {
if (req.method == 'POST') {
let body = '';
req.on('data', (data) => body += data);
req.on('end', () => Events.emit('metrics', body));
res.statusCode = 200;
res.end();
}
});
metricsListener.listen(3000, () => console.log('Listening for metrics on 3000'));
{
"name": "hello-world",
"version": "1.0.0",
"engines": {
"node": "10.x"
},
"scripts": {
"prettify": "prettier --single-quote --trailing-comma all --write 'bin/*' 'src/**/*.js'",
"test": "jest --silent",
"dev": "nodemon --watch . --watch src/* src/index.js",
"heroku-postbuild": "echo NODE_OPTIONS: $NODE_OPTIONS"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {
"jest": "^19.0.2",
"nodemon": "^1.11.0",
"prettier": "^0.22.0"
},
"dependencies": {}
}
#!/usr/bin/env node
const http = require('http');
const crypto = require('crypto');
const PORT = process.env.PORT || 5000;
// This will block the event loop for roughly the requested length of time
function blockCpuFor(ms) {
var now = new Date().getTime();
var result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now +ms)
return;
}
}
// block the event loop for 100ms every second
setInterval(() => {
blockCpuFor(100);
}, 1000)
// block the event loop for 1sec every 30 seconds
setInterval(() => {
blockCpuFor(1000);
}, 30000)
// Allocate and erase memory on an interval
let store = [];
setInterval(() => {
store.push(crypto.randomBytes(1000000).toString('hex'));
}, 500);
setInterval(() => {
store = [];
}, 60000);
const server = http.createServer((req, res) => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end("Hello, world!");
})
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
{
"name": "hello-world"
}
{
"name": "hello-world",
"version": "1.0.0",
"engines": {
"node": "6.x"
},
"scripts": {
"prettify": "prettier --single-quote --trailing-comma all --write 'bin/*' 'src/**/*.js'",
"test": "jest --silent",
"dev": "nodemon --watch . --watch src/* src/index.js",
"heroku-postbuild": "echo NODE_OPTIONS: $NODE_OPTIONS"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {
"jest": "^19.0.2",
"nodemon": "^1.11.0",
"prettier": "^0.22.0"
},
"dependencies": {}
}
#!/usr/bin/env node
const http = require('http');
const crypto = require('crypto');
const PORT = process.env.PORT || 5000;
// This will block the event loop for roughly the requested length of time
function blockCpuFor(ms) {
var now = new Date().getTime();
var result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now +ms)
return;
}
}
// block the event loop for 100ms every second
setInterval(() => {
blockCpuFor(100);
}, 1000)
// block the event loop for 1sec every 30 seconds
setInterval(() => {
blockCpuFor(1000);
}, 30000)
// Allocate and erase memory on an interval
let store = [];
setInterval(() => {
store.push(crypto.randomBytes(1000000).toString('hex'));
}, 500);
setInterval(() => {
store = [];
}, 60000);
const server = http.createServer((req, res) => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end("Hello, world!");
})
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
{
"name": "node-metrics-test-app",
"version": "1.0.0",
"engines": {
"node": "8.x"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {},
"dependencies": {}
}
#!/usr/bin/env node
const http = require('http');
const EventEmitter = require('events');
const PORT = process.env.PORT || 5000;
const Events = new EventEmitter();
// This will block the event loop for roughly the requested length of time
function blockCpuFor(ms) {
return new Promise((resolve, reject) => {
setTimeout(() => {
console.log(`blocking the event loop for ${ms}ms`);
let now = new Date().getTime();
let result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now + ms)
break;
}
resolve();
}, 100);
});
}
function getNextMetricsEvent() {
return new Promise((resolve, reject) => Events.once('metrics', resolve));
}
const server = http.createServer((req, res) => {
// wait for the next metrics event
getNextMetricsEvent()
.then(blockCpuFor(2000))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
// gather the next metrics data which should include these pauses
.then(getNextMetricsEvent())
.then(data => {
res.setHeader('Content-Type', 'application/json');
res.end(data);
})
.catch(() => {
res.statusCode = 500;
res.end("Something went wrong");
});
});
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
// Create a second server that intercepts the HTTP requests
// sent by the metrics plugin
const metricsListener = http.createServer((req, res) => {
if (req.method == 'POST') {
let body = '';
req.on('data', (data) => body += data);
req.on('end', () => Events.emit('metrics', body));
res.statusCode = 200;
res.end();
}
});
metricsListener.listen(3000, () => console.log('Listening for metrics on 3000'));
{
"name": "hello-world"
}
{
"name": "hello-world",
"version": "1.0.0",
"engines": {
"node": "8.x"
},
"scripts": {
"prettify": "prettier --single-quote --trailing-comma all --write 'bin/*' 'src/**/*.js'",
"test": "jest --silent",
"dev": "nodemon --watch . --watch src/* src/index.js",
"heroku-postbuild": "echo NODE_OPTIONS: $NODE_OPTIONS"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {
"jest": "^19.0.2",
"nodemon": "^1.11.0",
"prettier": "^0.22.0"
},
"dependencies": {}
}
#!/usr/bin/env node
const http = require('http');
const crypto = require('crypto');
const PORT = process.env.PORT || 5000;
// This will block the event loop for roughly the requested length of time
function blockCpuFor(ms) {
var now = new Date().getTime();
var result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now +ms)
return;
}
}
// block the event loop for 100ms every second
setInterval(() => {
blockCpuFor(100);
}, 1000)
// block the event loop for 1sec every 30 seconds
setInterval(() => {
blockCpuFor(1000);
}, 30000)
// Allocate and erase memory on an interval
let store = [];
setInterval(() => {
store.push(crypto.randomBytes(1000000).toString('hex'));
}, 500);
setInterval(() => {
store = [];
}, 60000);
const server = http.createServer((req, res) => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end("Hello, world!");
})
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
{
"name": "node-metrics-test-app",
"version": "1.0.0",
"engines": {
"node": "9.x"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {},
"dependencies": {}
}
#!/usr/bin/env node
const http = require('http');
const EventEmitter = require('events');
const PORT = process.env.PORT || 5000;
const Events = new EventEmitter();
// This will block the event loop for roughly the requested length of time
function blockCpuFor(ms) {
return new Promise((resolve, reject) => {
setTimeout(() => {
console.log(`blocking the event loop for ${ms}ms`);
let now = new Date().getTime();
let result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now + ms)
break;
}
resolve();
}, 100);
});
}
function getNextMetricsEvent() {
return new Promise((resolve, reject) => Events.once('metrics', resolve));
}
const server = http.createServer((req, res) => {
// wait for the next metrics event
getNextMetricsEvent()
.then(blockCpuFor(2000))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
// gather the next metrics data which should include these pauses
.then(getNextMetricsEvent())
.then(data => {
res.setHeader('Content-Type', 'application/json');
res.end(data);
})
.catch(() => {
res.statusCode = 500;
res.end("Something went wrong");
});
});
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
// Create a second server that intercepts the HTTP requests
// sent by the metrics plugin
const metricsListener = http.createServer((req, res) => {
if (req.method == 'POST') {
let body = '';
req.on('data', (data) => body += data);
req.on('end', () => Events.emit('metrics', body));
res.statusCode = 200;
res.end();
}
});
metricsListener.listen(3000, () => console.log('Listening for metrics on 3000'));
{
"name": "hello-world"
}
{
"name": "hello-world",
"version": "1.0.0",
"engines": {
"node": "9.x"
},
"scripts": {
"prettify": "prettier --single-quote --trailing-comma all --write 'bin/*' 'src/**/*.js'",
"test": "jest --silent",
"dev": "nodemon --watch . --watch src/* src/index.js",
"heroku-postbuild": "echo NODE_OPTIONS: $NODE_OPTIONS"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {
"jest": "^19.0.2",
"nodemon": "^1.11.0",
"prettier": "^0.22.0"
},
"dependencies": {}
}
#!/usr/bin/env node
const http = require('http');
const crypto = require('crypto');
const PORT = process.env.PORT || 5000;
// This will block the event loop for roughly the requested length of time
function blockCpuFor(ms) {
var now = new Date().getTime();
var result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now +ms)
return;
}
}
// block the event loop for 100ms every second
setInterval(() => {
blockCpuFor(100);
}, 1000)
// block the event loop for 1sec every 30 seconds
setInterval(() => {
blockCpuFor(1000);
}, 30000)
// Allocate and erase memory on an interval
let store = [];
setInterval(() => {
store.push(crypto.randomBytes(1000000).toString('hex'));
}, 500);
setInterval(() => {
store = [];
}, 60000);
const server = http.createServer((req, res) => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end("Hello, world!");
})
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
{
"name": "node-metrics-test-app",
"version": "1.0.0",
"engines": {
"node": "8.x"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {},
"dependencies": {}
}
#!/usr/bin/env node
const http = require('http');
const exec = require('child_process').exec;
const PORT = process.env.PORT || 5000;
const server = http.createServer((req, res) => {
/*
Note: we cannot use `heroku run` to test this since the metrics plugin is
disabled on run dynos
*/
exec('heroku whoami', (error, stdout, stderr) => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end(stderr);
});
})
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
require_relative '../spec_helper'
versions = get_test_versions
versions.select { |version| version_supports_metrics(version) }.each do |version|
describe "Node Metrics for v#{version}" do
context "test metrics for Node v#{version} app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-10-metrics",
before_deploy: -> { set_node_version(version) },
config: {
"HEROKU_METRICS_URL" => "http://localhost:3000",
"METRICS_INTERVAL_OVERRIDE" => "10000"
}
)
}
it "should deploy" do
app.deploy do |app|
expect(app.output).to include("Downloading and installing node #{version}...")
data = successful_json_body(app)
expect(data["gauges"]["node.eventloop.delay.ms.max"]).to be >= 2000
expect(data["counters"]["node.gc.collections"]).to be >= 0
expect(data["counters"]["node.gc.young.collections"]).to be >= 0
expect(data["counters"]["node.gc.old.collections"]).to be >= 0
end
end
end
end
end
require_relative '../spec_helper'
versions = get_test_versions
versions.each do |version|
describe "Hello World for Node v#{version}" do
context "a single-process Node v#{version} app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-10",
before_deploy: -> { set_node_version(version) }
)
}
it "should deploy successfully" do
app.deploy do |app|
expect(app.output).to include("Downloading and installing node #{version}...")
expect(successful_body(app).strip).to eq("Hello, world!")
end
end
end
end
end
require 'rspec/core'
require 'hatchet'
require 'fileutils'
require 'hatchet'
require 'rspec/retry'
require 'date'
require 'json'
require 'sem_version'
ENV['RACK_ENV'] = 'test'
RSpec.configure do |config|
config.filter_run focused: true unless ENV['IS_RUNNING_ON_TRAVIS']
config.run_all_when_everything_filtered = true
config.alias_example_to :fit, focused: true
config.full_backtrace = true
config.verbose_retry = true # show retry status in spec process
config.default_retry_count = 2 if ENV['IS_RUNNING_ON_TRAVIS'] # retry all tests that fail again
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
def successful_body(app, options = {})
retry_limit = options[:retry_limit] || 100
path = options[:path] ? "/#{options[:path]}" : ''
Excon.get("http://#{app.name}.herokuapp.com#{path}", :idempotent => true, :expects => 200, :retry_limit => retry_limit).body
end
def successful_json_body(app, options = {})
body = successful_body(app, options)
JSON.parse(body)
end
def set_node_version(version)
package_json = File.read('package.json')
package = JSON.parse(package_json)
package["engines"]["node"] = version
File.open('package.json', 'w') do |f|
f.puts JSON.dump(package)
end
end
def resolve_node_version(requirements, options = {})
# use nodebin to get latest node versions
requirements.map do |requirement|
retry_limit = options[:retry_limit] || 50
body = Excon.get("https://nodebin.herokai.com/v1/node/linux-x64/latest?range=#{requirement}", :idempotent => true, :expects => 200, :retry_limit => retry_limit).body
JSON.parse(body)['number']
end
end
def resolve_all_supported_node_versions(options = {})
retry_limit = options[:retry_limit] || 50
body = Excon.get("https://nodebin.herokai.com/v1/node/linux-x64/", :idempotent => true, :expects => 200, :retry_limit => retry_limit).body
list = JSON.parse(body).map { |n| n['number'] }
list.select do |n|
SemVersion.new(n).satisfies?('>= 6.0.0')
end
end
def version_supports_metrics(version)
SemVersion.new(version).satisfies?('>= 8.0.0')
end
def get_test_versions
if ENV['TEST_NODE_VERSION']
versions = [ENV['TEST_NODE_VERSION']]
elsif ENV['TEST_ALL_NODE_VERSIONS'] == 'true'
versions = resolve_all_supported_node_versions()
else
versions = resolve_node_version(['6.x', '8.x', '9.x', '10.x'])
end
puts("Running tests for Node versions: #{versions.join(', ')}")
versions
end
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"dependencies": {
"hashish": "*"
},
"engines": {
"node": "10.x"
},
"scripts": {
"start": "node foo.js"
}
}
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"dependencies": {
"hashish": "*"
},
"engines": {
"node": "6.x"
},
"scripts": {
"start": "node foo.js"
}
}
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"dependencies": {
"hashish": "*"
},
"engines": {
"node": "8.x"
},
"scripts": {
"start": "node foo.js"
}
}
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"dependencies": {
"hashish": "*"
},
"engines": {
"node": "9.x"
},
"scripts": {
"start": "node foo.js"
}
}
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"dependencies": {
"hashish": "*"
},
"engines": {
"node": "8.x"
},
"scripts": {
"start": "node foo.js",
"heroku-postbuild": "echo NODE_OPTIONS=$NODE_OPTIONS"
}
}
......@@ -3,6 +3,9 @@
"version": "1.0.0",
"main": "index.js",
"license": "MIT",
"engines": {
"yarn": "1.x"
},
"dependencies": {
"lodash": "^4.16.4"
}
......
#!/usr/bin/env bash
source $(pwd)/lib/environment.sh
source "$(pwd)"/lib/environment.sh
mktmpdir() {
local dir=$(mktemp -t testXXXXX)
......@@ -18,7 +18,7 @@ compile() {
echo "Compiling $fixture"
echo "in $build_dir"
echo "(caching in $cache_dir)"
cp -a $(pwd)/* ${bp_dir}
cp -a "$(pwd)"/* ${bp_dir}
cp -a ${bp_dir}/test/fixtures/$fixture/. ${build_dir}
"$bp_dir/bin/compile" "$build_dir" "$cache_dir"
}
......@@ -32,7 +32,7 @@ compileTest() {
echo "Compiling $fixture"
echo "in $build_dir"
echo "(caching in $cache_dir)"
cp -a $(pwd)/* ${bp_dir}
cp -a "$(pwd)"/* ${bp_dir}
cp -a ${bp_dir}/test/fixtures/$fixture/. ${build_dir}
"$bp_dir/bin/test-compile" "$build_dir" "$cache_dir"
......
......@@ -23,18 +23,16 @@ testDisableCache() {
echo "true" > $env_dir/NODE_VERBOSE
compile "node-modules-cache-1" $cache $env_dir
assertCaptured "lodash@1.0.0"
assertEquals "1" "$(ls -1 $cache/node/node_modules | grep lodash | wc -l | tr -d ' ')"
assertEquals "1" "$(ls -1 $cache/node/node_modules | grep -c lodash | tr -d ' ')"
assertCapturedSuccess
compile "node-modules-cache-2" $cache $env_dir
assertCaptured "lodash@1.0.0"
assertCaptured "Saving 2 cacheDirectories"
assertCapturedSuccess
echo "false" > $env_dir/NODE_MODULES_CACHE
compile "node-modules-cache-2" $cache $env_dir
assertCaptured "lodash@1.3.1"
assertNotCaptured "Saving 2 cacheDirectories"
assertCapturedSuccess
}
......@@ -42,13 +40,23 @@ testNodeModulesCached() {
cache=$(mktmpdir)
compile "caching" $cache
assertCaptured "Saving 2 cacheDirectories (default)"
assertCaptured "- node_modules"
assertCaptured "- bower_components (nothing to cache)"
assertEquals "1" "$(ls -1 $cache/node/node_modules | grep express | wc -l | tr -d ' ')"
assertEquals "1" "$(ls -1 $cache/node/node_modules | grep -c express | tr -d ' ')"
assertCapturedSuccess
}
testSetBuildEnv() {
cache=$(mktmpdir)
env_dir=$(mktmpdir)
compile "print-node-options"
assertCaptured "NODE_OPTIONS=--max_old_space_size=2560"
echo "--max_old_space_size=1234" > $env_dir/NODE_OPTIONS
compile "print-node-options" $cache $env_dir
assertCaptured "NODE_OPTIONS=--max_old_space_size=1234"
}
testYarn() {
compile "yarn"
assertCaptured "installing yarn"
......@@ -65,8 +73,8 @@ testYarnCacheDirectory() {
compile "yarn" $cache $env_dir
# These will be created if yarn is using the directory for its cache
assertDirectoryExists ${cache_dir}/yarn
assertDirectoryExists ${cache_dir}/yarn/v1
assertDirectoryExists ${cache_dir}/yarn/v1/npm-lodash-4.16.4-01ce306b9bad1319f2a5528674f88297aeb70127
assertDirectoryExists ${cache_dir}/yarn/v3
assertFileExists ${cache_dir}/yarn/v3/npm-lodash-4.16.4-01ce306b9bad1319f2a5528674f88297aeb70127
assertCapturedSuccess
}
......@@ -86,12 +94,12 @@ testBuildWithCache() {
cache=$(mktmpdir)
compile "stable-node" $cache
assertCaptured "Skipping cache restore (not-found)"
assertEquals "1" "$(ls -1 $cache/node/node_modules | grep hashish | wc -l | tr -d ' ')"
assertNotCaptured "Restoring cache"
assertEquals "1" "$(ls -1 $cache/node/node_modules | grep -c hashish | tr -d ' ')"
assertCapturedSuccess
compile "stable-node" $cache
assertNotCaptured "- node_modules (not cached - skipping)"
assertCaptured "- node_modules"
assertFileContains "${STACK}" "${cache}/node/signature"
assertCapturedSuccess
......@@ -110,7 +118,7 @@ testCacheWithPrebuild() {
assertCapturedSuccess
compile "cache-prebuild" $cache $env_dir
assertCaptured "Skipping cache restore (new-signature"
assertCaptured "Cached directories were not restored due to a change in version of node"
assertCapturedSuccess
}
......@@ -361,8 +369,8 @@ testBuildWithUserCacheDirectoriesCamel() {
compile "cache-directories-camel" $cache
assertCaptured "- non/existent (nothing to cache)"
assertEquals "1" "$(ls -1 $cache/node/server | grep node_modules | wc -l | tr -d ' ')"
assertEquals "1" "$(ls -1 $cache/node/client | grep node_modules | wc -l | tr -d ' ')"
assertEquals "1" "$(ls -1 $cache/node/server | grep -c node_modules | tr -d ' ')"
assertEquals "1" "$(ls -1 $cache/node/client | grep -c node_modules | tr -d ' ')"
assertCapturedSuccess
compile "cache-directories-camel" $cache
......@@ -374,35 +382,35 @@ testBuildWithUserCacheDirectoriesCamel() {
}
testConcurrency1X() {
LOG_CONCURRENCY=true MEMORY_AVAILABLE=512 capture $(pwd)/profile/WEB_CONCURRENCY.sh
LOG_CONCURRENCY=true MEMORY_AVAILABLE=512 capture "$(pwd)"/profile/WEB_CONCURRENCY.sh
assertCaptured "Detected 512 MB available memory, 512 MB limit per process (WEB_MEMORY)"
assertCaptured "Recommending WEB_CONCURRENCY=1"
assertCapturedSuccess
}
testConcurrency2X() {
LOG_CONCURRENCY=true MEMORY_AVAILABLE=1024 capture $(pwd)/profile/WEB_CONCURRENCY.sh
LOG_CONCURRENCY=true MEMORY_AVAILABLE=1024 capture "$(pwd)"/profile/WEB_CONCURRENCY.sh
assertCaptured "Detected 1024 MB available memory, 512 MB limit per process (WEB_MEMORY)"
assertCaptured "Recommending WEB_CONCURRENCY=2"
assertCapturedSuccess
}
testConcurrencyPerformanceM() {
LOG_CONCURRENCY=true MEMORY_AVAILABLE=2560 capture $(pwd)/profile/WEB_CONCURRENCY.sh
LOG_CONCURRENCY=true MEMORY_AVAILABLE=2560 capture "$(pwd)"/profile/WEB_CONCURRENCY.sh
assertCaptured "Detected 2560 MB available memory, 512 MB limit per process (WEB_MEMORY)"
assertCaptured "Recommending WEB_CONCURRENCY=5"
assertCapturedSuccess
}
testConcurrencyPerformanceL() {
LOG_CONCURRENCY=true MEMORY_AVAILABLE=14336 capture $(pwd)/profile/WEB_CONCURRENCY.sh
LOG_CONCURRENCY=true MEMORY_AVAILABLE=14336 capture "$(pwd)"/profile/WEB_CONCURRENCY.sh
assertCaptured "Detected 14336 MB available memory, 512 MB limit per process (WEB_MEMORY)"
assertCaptured "Recommending WEB_CONCURRENCY=28"
assertCapturedSuccess
}
testConcurrencyCustomLimit() {
LOG_CONCURRENCY=true MEMORY_AVAILABLE=1024 WEB_MEMORY=256 capture $(pwd)/profile/WEB_CONCURRENCY.sh
LOG_CONCURRENCY=true MEMORY_AVAILABLE=1024 WEB_MEMORY=256 capture "$(pwd)"/profile/WEB_CONCURRENCY.sh
assertCaptured "Detected 1024 MB available memory, 256 MB limit per process (WEB_MEMORY)"
assertCaptured "Recommending WEB_CONCURRENCY=4"
assertCapturedSuccess
......@@ -411,7 +419,7 @@ testConcurrencyCustomLimit() {
# When /sys/fs/cgroup/memory/memory.limit_in_bytes lies and reports a ridiculously large value
# This happens on Dokku, for example
testConcurrencyTooHigh() {
LOG_CONCURRENCY=true MEMORY_AVAILABLE=10000000000 capture $(pwd)/profile/WEB_CONCURRENCY.sh
LOG_CONCURRENCY=true MEMORY_AVAILABLE=10000000000 capture "$(pwd)"/profile/WEB_CONCURRENCY.sh
assertCaptured "Could not determine a reasonable value for WEB_CONCURRENCY"
assertCaptured "Recommending WEB_CONCURRENCY=1"
assertCapturedSuccess
......@@ -451,7 +459,7 @@ testSignatureInvalidation() {
compile "node-0.12.7" $cache
assertCaptured "Downloading and installing node 0.12.7"
assertCaptured "Skipping cache restore (new-signature"
assertCaptured "Cached directories were not restored due to a change in version of node"
assertCapturedSuccess
}
......@@ -606,8 +614,8 @@ testBuildWithUserCacheDirectories() {
compile "cache-directories" $cache
assertCaptured "Saving 2 cacheDirectories"
assertEquals "1" "$(ls -1 $cache/node | grep bower_components | wc -l | tr -d ' ')"
assertEquals "1" "$(ls -1 $cache/node | grep node_modules | wc -l | tr -d ' ')"
assertEquals "1" "$(ls -1 $cache/node | grep -c bower_components | tr -d ' ')"
assertEquals "1" "$(ls -1 $cache/node | grep -c node_modules | tr -d ' ')"
assertCapturedSuccess
compile "cache-directories" $cache
......@@ -891,13 +899,120 @@ testNpmPrune56Issue() {
assertCapturedSuccess
}
testPluginInstallationBuildTime() {
# The plugin should be installed for Node 8, 9, 10
compile "node-8"
assertFileExists "${compile_dir}/.heroku/heroku-nodejs-plugin/heroku-nodejs-plugin.node"
compile "node-9"
assertFileExists "${compile_dir}/.heroku/heroku-nodejs-plugin/heroku-nodejs-plugin.node"
compile "node-10"
assertFileExists "${compile_dir}/.heroku/heroku-nodejs-plugin/heroku-nodejs-plugin.node"
# but not for earlier versions
compile "node-6"
assertFileDoesNotExist "${compile_dir}/.heroku/heroku-nodejs-plugin/heroku-nodejs-plugin.node"
}
testPluginInstallationRunTime() {
local env_dir=$(mktmpdir)
compile "node-8" "$(mktmpdir)" $env_dir
# by default $NODE_OPTIONS is unmodified
executeStartup $env_dir
assertEquals "" "$NODE_OPTIONS"
cleanupStartup
# If $HEROKU_METRICS_URL is defined at run time, the script
# should add a require statement to $NODE_OPTIONS
export HEROKU_METRICS_URL=https://localhost:5000
executeStartup $env_dir
assertEquals "--require $compile_dir/.heroku/heroku-nodejs-plugin" "$NODE_OPTIONS"
cleanupStartup
# unless $HEROKU_SKIP_NODE_PLUGIN is defined
export HEROKU_METRICS_URL=https://localhost:5000
export HEROKU_SKIP_NODE_PLUGIN=true
executeStartup $env_dir
assertEquals "" "$NODE_OPTIONS"
cleanupStartup
# if $NODE_OPTIONS is already set, the require flag is appended to it
export HEROKU_METRICS_URL=https://localhost:5000
export NODE_OPTIONS="--max-old-space-size=128"
executeStartup $env_dir
assertEquals "--max-old-space-size=128 --require $compile_dir/.heroku/heroku-nodejs-plugin" "$NODE_OPTIONS"
cleanupStartup
# and $NODE_OPTIONS is left unchanged if $HEROKU_SKIP_NODE_PLUGIN is defined
export HEROKU_METRICS_URL=https://localhost:5000
export NODE_OPTIONS="--max-old-space-size=128"
export HEROKU_SKIP_NODE_PLUGIN=true
executeStartup $env_dir
assertEquals "--max-old-space-size=128" "$NODE_OPTIONS"
cleanupStartup
}
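# --- Illustrative sketch (not part of this commit) ---
# The assertions above assume a .profile.d startup script along these lines:
# inject the metrics plugin only when $HEROKU_METRICS_URL is set and
# $HEROKU_SKIP_NODE_PLUGIN is not, appending a --require flag to whatever
# $NODE_OPTIONS already holds. The variable handling below is an assumption
# for illustration, not the buildpack's actual script.
if [[ -n "${HEROKU_METRICS_URL:-}" ]] && [[ -z "${HEROKU_SKIP_NODE_PLUGIN:-}" ]]; then
  plugin_dir="$HOME/.heroku/heroku-nodejs-plugin"
  if [[ -d "$plugin_dir" ]]; then
    # preserve any flags the user already set, then append the require
    export NODE_OPTIONS="${NODE_OPTIONS:+$NODE_OPTIONS }--require $plugin_dir"
  fi
fi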
testPluginInstallationUnsupportedNodeRunTime() {
local env_dir=$(mktmpdir)
compile "node-6" "$(mktmpdir)" $env_dir
# This can happen if a user opts in to the feature but is not using a supported node version
export HEROKU_METRICS_URL=https://localhost:5000
executeStartup $env_dir
assertEquals "" "$NODE_OPTIONS"
cleanupStartup
}
testMemoryMetrics() {
env_dir=$(mktmpdir)
local metrics_log=$(mktemp)
echo "$metrics_log" > $env_dir/BUILDPACK_LOG_FILE
compile "pre-post-build-scripts" "$(mktmpdir)" $env_dir
assertFileContains "measure#buildpack.nodejs.exec.heroku-prebuild.time=" $metrics_log
assertFileContains "measure#buildpack.nodejs.exec.heroku-prebuild.memory=" $metrics_log
assertFileContains "measure#buildpack.nodejs.exec.npm-install.time=" $metrics_log
assertFileContains "measure#buildpack.nodejs.exec.npm-install.memory=" $metrics_log
assertFileContains "measure#buildpack.nodejs.exec.heroku-postbuild.time=" $metrics_log
assertFileContains "measure#buildpack.nodejs.exec.heroku-postbuild.memory=" $metrics_log
# erase the metrics log
echo "" > $metrics_log
compile "yarn" "$(mktmpdir)" $env_dir
assertFileContains "measure#buildpack.nodejs.exec.yarn-install.memory=" "$metrics_log"
assertFileContains "measure#buildpack.nodejs.exec.yarn-install.time=" "$metrics_log"
# this fixture does not have pre- or post-build scripts
assertFileNotContains "measure#buildpack.nodejs.exec.heroku-prebuild.time=" $metrics_log
assertFileNotContains "measure#buildpack.nodejs.exec.heroku-prebuild.memory=" $metrics_log
assertFileNotContains "measure#buildpack.nodejs.exec.heroku-postbuild.time=" $metrics_log
assertFileNotContains "measure#buildpack.nodejs.exec.heroku-postbuild.memory=" $metrics_log
}
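# --- Illustrative sketch (not part of this commit) ---
# testMemoryMetrics assumes the buildpack appends l2met-style lines such as
#   measure#buildpack.nodejs.exec.npm-install.time=...
# to the file named by the BUILDPACK_LOG_FILE config var. A hypothetical
# emitter (the function name is an assumption, not the buildpack's API):
emit_measure() {
  # $1 = metric key, $2 = value; silently drop the line if no log file is set
  echo "measure#buildpack.nodejs.$1=$2" >> "${BUILDPACK_LOG_FILE:-/dev/null}"
}
# e.g. emit_measure "exec.npm-install.time" "1234ms"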
testBinDetectWarnings() {
detect "slugignore-package-json"
assertCapturedError "'package.json' listed in '.slugignore' file"
assertCapturedError "https://devcenter.heroku.com/articles/slug-compiler#ignoring-files-with-slugignore"
detect "gitignore-package-json"
assertCapturedError "'package.json' listed in '.gitignore' file"
assertCapturedError "https://devcenter.heroku.com/articles/gitignore"
detect "node-project-missing-package-json"
assertCapturedError "Application not supported by 'heroku/nodejs' buildpack"
assertCapturedError "https://devcenter.heroku.com/articles/nodejs-support#activation"
assertCapturedError "index.js"
assertCapturedError "src/"
}
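# --- Illustrative sketch (not part of this commit) ---
# The detect warnings asserted above imply bin/detect checks of roughly this
# shape: when package.json is absent, explain why (listed in .slugignore or
# .gitignore, or simply missing) and point to the docs. The matching logic
# here is an assumption for illustration.
detect_package_json_sketch() {
  local build_dir="$1"
  [[ -f "$build_dir/package.json" ]] && return 0
  if [[ -f "$build_dir/.slugignore" ]] && grep -q "package.json" "$build_dir/.slugignore"; then
    echo "Error: 'package.json' listed in '.slugignore' file" >&2
  elif [[ -f "$build_dir/.gitignore" ]] && grep -q "package.json" "$build_dir/.gitignore"; then
    echo "Error: 'package.json' listed in '.gitignore' file" >&2
  else
    echo "Error: Application not supported by 'heroku/nodejs' buildpack" >&2
  fi
  return 1
}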
# Utils
pushd $(dirname 0) >/dev/null
pushd "$(dirname 0)" >/dev/null
popd >/dev/null
source $(pwd)/test/utils
source $(pwd)/lib/environment.sh
source "$(pwd)"/test/utils
source "$(pwd)"/lib/environment.sh
mktmpdir() {
dir=$(mktemp -t testXXXXX)
......@@ -907,7 +1022,7 @@ mktmpdir() {
}
detect() {
capture $(pwd)/bin/detect $(pwd)/test/fixtures/$1
capture "$(pwd)"/bin/detect "$(pwd)"/test/fixtures/$1
}
compile_dir=""
......@@ -919,15 +1034,50 @@ default_process_types_cleanup() {
fi
}
detect() {
default_process_types_cleanup
bp_dir=$(mktmpdir)
compile_dir=$(mktmpdir)
cp -a "$(pwd)"/* ${bp_dir}
cp -a ${bp_dir}/test/fixtures/$1/. ${compile_dir}
capture ${bp_dir}/bin/detect ${compile_dir}
}
compile() {
default_process_types_cleanup
bp_dir=$(mktmpdir)
compile_dir=$(mktmpdir)
cp -a $(pwd)/* ${bp_dir}
cp -a "$(pwd)"/* ${bp_dir}
cp -a ${bp_dir}/test/fixtures/$1/. ${compile_dir}
capture ${bp_dir}/bin/compile ${compile_dir} ${2:-$(mktmpdir)} $3
}
# This is meant to be run after `compile`. `cleanupStartup` must be called
# after this function and before any other tests are executed
executeStartup() {
local env_dir=$1
# On Heroku, $HOME is the /app dir, so we need to set it to
# the compile_dir here
export HOME=${compile_dir}
# we need to export any environment variables defined in the env_dir and then run
# all of the .profile.d scripts
export_env_dir $env_dir
for f in ${compile_dir}/.profile.d/*; do source $f > /dev/null 2> /dev/null ; done
}
cleanupStartup() {
unset HOME
unset NODE_ENV
unset NODE_HOME
unset NODE_OPTIONS
unset DYNO
unset HEROKU_METRICS_URL
unset HEROKU_SKIP_NODE_PLUGIN
}
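# --- Illustrative usage (not part of this commit) ---
# Typical pattern in the plugin tests above: compile a fixture, simulate dyno
# startup, assert on the resulting environment, then reset it.
#   compile "node-8" "$(mktmpdir)" $env_dir
#   executeStartup $env_dir
#   assertEquals "" "$NODE_OPTIONS"
#   cleanupStartup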
compileTest() {
default_process_types_cleanup
......@@ -936,7 +1086,7 @@ compileTest() {
local cache_dir=${2:-$(mktmpdir)}
local env_dir=$3
cp -a $(pwd)/* ${bp_dir}
cp -a "$(pwd)"/* ${bp_dir}
cp -a ${bp_dir}/test/fixtures/$1/. ${compile_dir}
capture ${bp_dir}/bin/test-compile ${compile_dir} ${2:-$(mktmpdir)} $3
......@@ -965,13 +1115,13 @@ compileDir() {
local cache_dir=${2:-$(mktmpdir)}
local env_dir=$3
cp -a $(pwd)/* ${bp_dir}
cp -a "$(pwd)"/* ${bp_dir}
capture ${bp_dir}/bin/compile ${compile_dir} ${cache_dir} ${env_dir}
}
release() {
bp_dir=$(mktmpdir)
cp -a $(pwd)/* ${bp_dir}
cp -a "$(pwd)"/* ${bp_dir}
capture ${bp_dir}/bin/release ${bp_dir}/test/fixtures/$1
}
......@@ -979,13 +1129,4 @@ assertFile() {
assertEquals "$1" "$(cat ${compile_dir}/$2)"
}
assertDirectoryExists() {
if [[ ! -e "$1" ]]; then
fail "$1 does not exist"
fi
if [[ ! -d $1 ]]; then
fail "$1 is not a directory"
fi
}
source $(pwd)/test/shunit2
source "$(pwd)"/test/shunit2
#!/usr/bin/env bash
# testing monitor_memory_usage
# allocate ~14 MB of memory and wait a bit
use_memory() {
for index in $(seq 10); do
value=$(seq -w -s '' $index $(($index + 100000)))
eval array$index=$value
done
sleep 0.5
}
# print each argument on its own line to stdout
print_args() {
while (( "$#" )); do
echo $1
shift
done
}
testMonitorMemory() {
local mem_output=$(mktemp)
local stdout_capture=$(mktemp)
monitor_memory_usage $mem_output echo "this is a test" > /dev/null
assertTrue "should use less than 2mb" "[[ $(cat $mem_output) -lt 2 ]]"
monitor_memory_usage $mem_output use_memory
assertTrue "should use more than 10mb" "[[ $(cat $mem_output) -gt 10 ]]"
monitor_memory_usage $mem_output print_args --foo --bar="baz lol hi" > $stdout_capture
assertTrue "should use less than 2mb" "[[ $(cat $mem_output) -lt 2 ]]"
assertTrue "should output 2 lines" "[[ $(wc -l < $stdout_capture) -eq 2 ]]"
assertEquals "first line" "--foo" "$(head -n 1 $stdout_capture)"
assertEquals "second line" "--bar=baz lol hi" "$(tail -n 1 $stdout_capture)"
}
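# --- Illustrative sketch (not part of this commit) ---
# testMonitorMemory relies on this contract for monitor_memory_usage: run the
# given command with its arguments, pass its stdout through untouched, and
# write the peak memory it used (in MB) to the file named by the first
# argument. A minimal stand-in under those assumptions (suffixed _sketch so it
# does not shadow the real function sourced from lib/monitor.sh):
monitor_memory_usage_sketch() {
  local out_file="$1"; shift
  local peak_kb=0 rss pid
  "$@" &            # run the command in the background; stdout is inherited
  pid=$!
  while kill -0 "$pid" 2>/dev/null; do
    rss=$(ps -o rss= -p "$pid" 2>/dev/null | tr -d ' ')
    if [[ -n "$rss" && "$rss" -gt "$peak_kb" ]]; then peak_kb=$rss; fi
    sleep 0.05
  done
  wait "$pid"
  echo $(( peak_kb / 1024 )) > "$out_file"
}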
testOutput() {
local stdout
stdout=$(echo ' Indented line' | output /dev/null)
assertEquals 'should preserve leading whitespace' ' Indented line' "${stdout}"
stdout=$(echo 'Foo \ bar' | output /dev/null)
assertEquals 'should preserve unescaped backslashes' ' Foo \ bar' "${stdout}"
}
testKeyValue() {
local store=$(mktemp)
kv_create $store
kv_set $store key value
kv_set $store foo bar
kv_set $store key other_value
kv_set $store bar baz
assertEquals "other_value" "$(kv_get $store key)"
assertEquals "bar" "$(kv_get $store foo)"
assertEquals "baz" "$(kv_get $store bar)"
# if the key isn't there it should return an empty string
assertEquals "" "$(kv_get $store not_there)"
# kv_keys returns each key on a new line
assertEquals "$(printf "%s\n" bar foo key)" "$(kv_keys $store)"
# kv_list returns key=value pairs, one per line
assertEquals "$(printf "%s\n" bar=baz foo=bar key=other_value)" "$(kv_list $store)"
# calling create on an existing store doesn't erase it
kv_create $store
assertEquals "$(printf "%s\n" bar=baz foo=bar key=other_value)" "$(kv_list $store)"
# now clear the store
kv_clear $store
assertEquals "" "$(kv_get $store key)"
assertEquals "" "$(kv_keys $store)"
assertEquals "" "$(kv_list $store)"
}
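# --- Illustrative sketch (not part of this commit) ---
# The assertions above pin down the kvstore contract: a flat file of key=value
# lines, last write wins, kv_keys/kv_list come back sorted, and every
# operation is a no-op when the backing file is missing. A minimal file-backed
# version under those assumptions (suffixed _sketch; the real one is lib/kvstore.sh):
kv_set_sketch() {
  [[ -f "$1" ]] && echo "$2=$3" >> "$1"
}
kv_get_sketch() {
  [[ -f "$1" ]] && grep "^$2=" "$1" | tail -n 1 | cut -d= -f2-
}
kv_keys_sketch() {
  [[ -f "$1" ]] && cut -d= -f1 "$1" | sort -u
}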
# if the file doesn't exist, everything should be a no-op
testKeyValueNoFile() {
# empty file argument
local empty=""
kv_set $empty key value
assertEquals "$(kv_get $empty key)" ""
assertEquals "$(kv_keys $empty)" ""
assertEquals "$(kv_list $empty)" ""
local store="/tmp/does-not-exist"
kv_set $store key value
assertEquals "" "$(kv_get $store key)"
assertEquals "" "$(kv_keys $store)"
assertEquals "" "$(kv_list $store)"
# running these commands should not have created the file
assertTrue "[[ ! -e $store ]]"
local space=" "
kv_set $space key value
assertEquals "$(kv_get $space key)" ""
assertEquals "$(kv_keys $space)" ""
assertEquals "$(kv_list $space)" ""
}
# the modules to be tested
source "$(pwd)"/lib/monitor.sh
source "$(pwd)"/lib/output.sh
source "$(pwd)"/lib/kvstore.sh
# import the testing framework
source "$(pwd)"/test/shunit2
......@@ -36,7 +36,7 @@ capture()
LAST_COMMAND="$@"
$@ >${STD_OUT} 2>${STD_ERR}
"$@" >${STD_OUT} 2>${STD_ERR}
RETURN=$?
rtrn=${RETURN} # deprecated
}
......@@ -186,5 +186,26 @@ assertFileMD5()
fail "no suitable MD5 hashing command found on this system"
fi
assertEquals "${expected_md5_cmd_output}" "`${md5_cmd}`"
assertEquals "${expected_md5_cmd_output}" "$(${md5_cmd})"
}
assertDirectoryExists() {
if [[ ! -e "$1" ]]; then
fail "$1 does not exist"
fi
if [[ ! -d $1 ]]; then
fail "$1 is not a directory"
fi
}
assertFileExists()
{
filename=$1
assertTrue "$filename doesn't exist" "[[ -e $filename ]]"
}
assertFileDoesNotExist()
{
filename=$1
assertTrue "$filename exists" "[[ ! -e $filename ]]"
}