Commit db0e7358 authored by Unknown

Merge remote-tracking branch 'upstream/master'

parents e87014e5 76ba1980
language: bash
language: go
go:
- "1.12"
sudo: required
services:
- docker
env:
- TEST=shellcheck
- TEST=heroku-18 STACK=heroku-18
- TEST=heroku-16 STACK=heroku-16
- TEST=cedar-14 STACK=cedar-14
- TEST=hatchet
- TEST=unit
- TEST=test-binary GO111MODULE=on
install:
- if [[ -n $STACK ]]; then
docker pull "heroku/${STACK/-/:}";
......
......@@ -2,6 +2,64 @@
## master
## v148 (2019-05-02)
- Dark-launch new semver matching logic for node binaries (#663)
## v147 (2019-05-01)
- Dark-launch new semver matching logic for yarn binaries (#661)
- Add node 12.x as a supported version for Node Metrics beta (#662)
## v146 (2019-04-25)
- Deprecate io.js as an alternative runtime (#658)
- Prototyping new version resolution approach to replace Nodebin (#649 - #657)
## v145 (2019-04-16)
- Separate prebuild step in log output (#646)
- Clean up script metrics and logging (#647)
## v144 (2019-04-08)
- Remove temporary warning about "run build" change (#644)
## v143 (2019-03-28)
- Internal logging changes (#637, #631, #630)
## v142 (2019-03-11)
- Add temporary warning about "run build" when the build fails as well (#639)
## v141 (2019-03-11)
- Add temporary warning about "run build" change to log output (#636)
## v140 (2019-03-11)
- Run the build script by default (#628)
## v139 (2019-03-04)
- Make breaking change warning header brighter (#627)
## v138 (2019-02-20)
- Add new Build header (#625)
- Fix yarn run error when script is empty string (#624)
## v137 (2019-02-14)
- Internal logging changes (#620, #618, #621)
- Detect build scripts even when they are empty (#617)
## v136 (2019-02-09)
- Add warning for the upcoming run build change (#616)
## v135 (2019-02-06)
- Fix bug where failing builds on CI would not fail CI (#613)
- Internal logging changes (#596, #600)
## v134 (2018-12-20)
- Internal changes (#593, #591)
- Handle `$MEMORY_AVAILABLE` when `memory.limit_in_bytes` is nonsensically large (#531)
## v133 (2018-11-28)
- Add warning for flatmap-stream 404 failure (#590)
## v132 (2018-11-12)
- Quietly add new build script behavior behind a flag (#584, #585)
......@@ -66,6 +124,7 @@
## v119 (2018-02-28)
- Install and prune devDependencies by default (#519)
- [Breaking] Stop setting the env var `NPM_CONFIG_PRODUCTION=true` by default
## v118 (2018-02-02)
......
......@@ -86,3 +86,8 @@ make test-heroku-16
The tests are run via the vendored
[shunit2](https://github.com/kward/shunit2)
test framework.
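For example, the stack-specific suites can be run through the repo's make targets; `make test-heroku-16` appears above, and the other TEST values in .travis.yml (heroku-18, shellcheck, unit, ...) are assumed to map to similarly named targets:

```sh
# Run the vendored shunit2 suite against the heroku-16 stack image (target shown above)
make test-heroku-16

# Assumed analogous target for the heroku-18 stack -- check the Makefile for exact names
make test-heroku-18
```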
## Updating go binaries
If you would like to develop and update the Go binaries, you will need to install
[go 1.12](https://golang.org/doc/install#install) and [upx](https://upx.github.io/).
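A minimal sketch of that workflow, assuming the Go package sits at the repository root and that the compressed binary is what gets vendored (the buildpack later invokes `$BP_DIR/vendor/resolve-version-$(get_os)`):

```sh
# Cross-compile the resolve-version binary for the Linux stack images;
# the output name mirrors the vendor/resolve-version-<os> path used by lib/binaries.sh
GOOS=linux GOARCH=amd64 GO111MODULE=on go build -o vendor/resolve-version-linux .

# Shrink the binary with upx before committing it to vendor/ (flag choice is an assumption)
upx --best vendor/resolve-version-linux
```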
......@@ -9,6 +9,8 @@ unset GIT_DIR # Avoid GIT_DIR leak from previous build steps
### Constants
# This is used by the buildpack stdlib for metrics
# shellcheck disable=SC2034
BPLOG_PREFIX="buildpack.nodejs"
### Configure directories
......@@ -16,65 +18,91 @@ BPLOG_PREFIX="buildpack.nodejs"
BUILD_DIR=${1:-}
CACHE_DIR=${2:-}
ENV_DIR=${3:-}
BP_DIR=$(cd "$(dirname ${0:-})"; cd ..; pwd)
BP_DIR=$(cd "$(dirname "${0:-}")"; cd ..; pwd)
STDLIB_FILE=$(mktemp -t stdlib.XXXXX)
### Load dependencies
curl --silent --retry 5 --retry-max-time 15 'https://wj-backend.oss-cn-hongkong.aliyuncs.com/heroku/lang-common/buildpack-stdlib/v7/stdlib.sh' > "$STDLIB_FILE"
# shellcheck source=/dev/null
source "$STDLIB_FILE"
source $BP_DIR/lib/output.sh
source $BP_DIR/lib/monitor.sh
source $BP_DIR/lib/json.sh
source $BP_DIR/lib/failure.sh
source $BP_DIR/lib/environment.sh
source $BP_DIR/lib/binaries.sh
source $BP_DIR/lib/cache.sh
source $BP_DIR/lib/dependencies.sh
source $BP_DIR/lib/plugin.sh
# shellcheck source=lib/output.sh
source "$BP_DIR/lib/output.sh"
# shellcheck source=lib/monitor.sh
source "$BP_DIR/lib/monitor.sh"
# shellcheck source=lib/environment.sh
source "$BP_DIR/lib/environment.sh"
# shellcheck source=lib/failure.sh
source "$BP_DIR/lib/failure.sh"
# shellcheck source=lib/binaries.sh
source "$BP_DIR/lib/binaries.sh"
# shellcheck source=lib/json.sh
source "$BP_DIR/lib/json.sh"
# shellcheck source=lib/cache.sh
source "$BP_DIR/lib/cache.sh"
# shellcheck source=lib/dependencies.sh
source "$BP_DIR/lib/dependencies.sh"
# shellcheck source=lib/plugin.sh
source "$BP_DIR/lib/plugin.sh"
# shellcheck source=lib/uuid.sh
source "$BP_DIR/lib/uuid.sh"
# shellcheck source=lib/kvstore.sh
source "$BP_DIR/lib/kvstore.sh"
# shellcheck source=lib/metadata.sh
source "$BP_DIR/lib/metadata.sh"
# shellcheck source=lib/builddata.sh
source "$BP_DIR/lib/builddata.sh"
export PATH="$BUILD_DIR/.heroku/node/bin:$BUILD_DIR/.heroku/yarn/bin":$PATH
LOG_FILE=$(mktemp -t node-build-log.XXXXX)
echo "" > "$LOG_FILE"
build_start_time=$(nowms)
### Handle errors
handle_failure() {
meta_set "node-build-success" "false"
header "Build failed"
fail_yarn_outdated "$LOG_FILE"
fail_yarn_lockfile_outdated "$LOG_FILE"
fail_node_install "$LOG_FILE"
fail_yarn_install "$LOG_FILE"
fail_node_install "$LOG_FILE" "$BUILD_DIR"
fail_yarn_install "$LOG_FILE" "$BUILD_DIR"
fail_invalid_semver "$LOG_FILE"
log_other_failures "$LOG_FILE"
warn_untracked_dependencies "$LOG_FILE"
warn_angular_resolution "$LOG_FILE"
warn_missing_devdeps "$LOG_FILE"
warn_missing_devdeps "$LOG_FILE" "$BUILD_DIR"
warn_econnreset "$LOG_FILE"
log_meta_data >> "$BUILDPACK_LOG_FILE"
failure_message | output "$LOG_FILE"
}
trap 'handle_failure' ERR
### Initialize metadata store
meta_create "$CACHE_DIR"
### Check initial state
[ -e "$BUILD_DIR/node_modules" ] && PREBUILD=true || PREBUILD=false
[ -f "$BUILD_DIR/yarn.lock" ] && YARN=true || YARN=false
[ -f "$BUILD_DIR/package-lock.json" ] && NPM_LOCK=true || NPM_LOCK=false
### Save build info
log_initial_state
generate_uuids
### Failures that should be caught immediately
fail_dot_heroku "$BUILD_DIR"
fail_dot_heroku_node "$BUILD_DIR"
fail_invalid_package_json "$BUILD_DIR"
fail_multiple_lockfiles "$BUILD_DIR"
fail_iojs_unsupported "$BUILD_DIR"
warn_prebuilt_modules "$BUILD_DIR"
warn_missing_package_json "$BUILD_DIR"
### Behavior flags
[ ! "$NEW_BUILD_SCRIPT_BEHAVIOR" ] && NEW_BUILD_SCRIPT_BEHAVIOR=$(read_json "$BUILD_DIR/package.json" ".[\"heroku-run-build-script\"]")
warn_build_script_behavior_opt_in $NEW_BUILD_SCRIPT_BEHAVIOR | output "$LOG_FILE"
### Compile
create_env() {
......@@ -87,58 +115,58 @@ create_env() {
header "Creating runtime environment" | output "$LOG_FILE"
mkdir -p "$BUILD_DIR/.heroku/node/"
cd $BUILD_DIR
cd "$BUILD_DIR"
create_env # can't pipe the whole thing because piping causes subshells, preventing exports
list_node_config | output "$LOG_FILE"
create_build_env
### Configure package manager cache directories
[ ! "$YARN_CACHE_FOLDER" ] && export YARN_CACHE_FOLDER=$(mktemp -d -t yarncache.XXXXX)
[ ! "$NPM_CONFIG_CACHE" ] && export NPM_CONFIG_CACHE=$(mktemp -d -t npmcache.XXXXX)
[ ! "$YARN_CACHE_FOLDER" ] && YARN_CACHE_FOLDER=$(mktemp -d -t yarncache.XXXXX)
[ ! "$NPM_CONFIG_CACHE" ] && NPM_CONFIG_CACHE=$(mktemp -d -t npmcache.XXXXX)
export YARN_CACHE_FOLDER NPM_CONFIG_CACHE
install_bins() {
local node_engine=$(read_json "$BUILD_DIR/package.json" ".engines.node")
local iojs_engine=$(read_json "$BUILD_DIR/package.json" ".engines.iojs")
local npm_engine=$(read_json "$BUILD_DIR/package.json" ".engines.npm")
local yarn_engine=$(read_json "$BUILD_DIR/package.json" ".engines.yarn")
local node_engine iojs_engine npm_engine yarn_engine npm_version node_version
if [ -n "$iojs_engine" ]; then
echo "engines.iojs (package.json): $iojs_engine (iojs)"
else
echo "engines.node (package.json): ${node_engine:-unspecified}"
fi
node_engine=$(read_json "$BUILD_DIR/package.json" ".engines.node")
npm_engine=$(read_json "$BUILD_DIR/package.json" ".engines.npm")
yarn_engine=$(read_json "$BUILD_DIR/package.json" ".engines.yarn")
meta_set "node-version-request" "$node_engine"
meta_set "npm-version-request" "$npm_engine"
meta_set "yarn-version-request" "$yarn_engine"
meta_set "node-version-request" "$node_engine"
meta_set "npm-version-request" "$npm_engine"
meta_set "yarn-version-request" "$yarn_engine"
echo "engines.node (package.json): ${node_engine:-unspecified}"
echo "engines.npm (package.json): ${npm_engine:-unspecified (use default)}"
if $YARN; then
echo "engines.yarn (package.json): ${yarn_engine:-unspecified (use default)}"
fi
echo ""
if [ -n "$iojs_engine" ]; then
warn_node_engine "$iojs_engine"
install_iojs "$iojs_engine" "$BUILD_DIR/.heroku/node"
local npm_version="$(npm --version)"
local node_version="$(node --version)"
echo "Using bundled npm version for iojs compatibility: $npm_version"
mcount "version.iojs.$node_version"
else
warn_node_engine "$node_engine"
monitor "install-node-binary" install_nodejs "$node_engine" "$BUILD_DIR/.heroku/node"
monitor "install-npm-binary" install_npm "$npm_engine" "$BUILD_DIR/.heroku/node" $NPM_LOCK
local node_version="$(node --version)"
mcount "version.node.$node_version"
fi
warn_node_engine "$node_engine"
monitor "install-node-binary" install_nodejs "$node_engine" "$BUILD_DIR/.heroku/node" "$(get_platform)"
monitor "install-npm-binary" install_npm "$npm_engine" "$BUILD_DIR/.heroku/node" $NPM_LOCK
node_version="$(node --version)"
mcount "version.node.$node_version"
meta_set "node-version" "$node_version"
# Download yarn if there is a yarn.lock file or if the user
# has specified a version of yarn under "engines". We'll still
# only install using yarn if there is a yarn.lock file
if $YARN || [ -n "$yarn_engine" ]; then
monitor "install-yarn-binary" install_yarn "$BUILD_DIR/.heroku/yarn" "$yarn_engine"
monitor "install-yarn-binary" install_yarn "$BUILD_DIR/.heroku/yarn" "$yarn_engine" "$(get_platform)"
fi
if $YARN; then
mcount "version.yarn.$(yarn --version)"
meta_set "yarn-version" "$(yarn --version)"
else
mcount "version.npm.$(npm --version)"
meta_set "npm-version" "$(npm --version)"
fi
warn_old_npm
......@@ -148,8 +176,10 @@ header "Installing binaries" | output "$LOG_FILE"
install_bins | output "$LOG_FILE"
restore_cache() {
local cache_status="$(get_cache_status)"
local cache_directories="$(get_cache_directories)"
local cache_status cache_directories
cache_status="$(get_cache_status "$CACHE_DIR")"
cache_directories="$(get_cache_directories "$BUILD_DIR")"
if $YARN; then
if [ -e "$BUILD_DIR/node_modules" ]; then
......@@ -166,7 +196,7 @@ restore_cache() {
if [[ "$cache_directories" == "" ]]; then
restore_default_cache_directories "$BUILD_DIR" "$CACHE_DIR"
else
restore_custom_cache_directories "$BUILD_DIR" "$CACHE_DIR" $cache_directories
restore_custom_cache_directories "$BUILD_DIR" "$CACHE_DIR" "$cache_directories"
fi
elif [[ "$cache_status" == "new-signature" ]]; then
header "Restoring cache"
......@@ -177,7 +207,7 @@ restore_cache() {
# If the user has specified custom cache directories, be more explicit
echo "Invalidating cache due to a change in version of node, npm, yarn or stack"
echo "Will not restore the following directories for this build:"
for directory in $(< $cache_directories); do
for directory in $cache_directories; do
echo " $directory"
done
fi
......@@ -187,15 +217,16 @@ restore_cache() {
fi
mcount "cache.$cache_status"
meta_set "cache-status" "$cache_status"
}
restore_cache | output "$LOG_FILE"
build_dependencies() {
run_if_present 'heroku-prebuild'
local cache_status start
local cache_status="$(get_cache_status)"
local start=$(nowms)
cache_status="$(get_cache_status "$CACHE_DIR")"
start=$(nowms)
if $YARN; then
yarn_node_modules "$BUILD_DIR"
......@@ -208,24 +239,20 @@ build_dependencies() {
mtime "modules.time.cache.$cache_status" "${start}"
if [[ "$NEW_BUILD_SCRIPT_BEHAVIOR" = true ]]; then
mcount "build-script.new-behavior"
run_build_script
else
mcount "build-script.legacy-behavior"
run_if_present 'heroku-postbuild'
fi
log_build_scripts
header "Build"
run_build_script "$BUILD_DIR"
}
header "Building dependencies" | output "$LOG_FILE"
log_build_scripts "$BUILD_DIR"
run_prebuild_script "$BUILD_DIR" | output "$LOG_FILE"
header "Installing dependencies" | output "$LOG_FILE"
build_dependencies | output "$LOG_FILE"
cache_build() {
local cache_directories="$(get_cache_directories)"
local cache_directories
cache_directories="$(get_cache_directories "$BUILD_DIR")"
clear_cache
clear_cache "$CACHE_DIR"
if ! ${NODE_MODULES_CACHE:-true}; then
# we've already warned that caching is disabled in the restore step
# so be silent here
......@@ -235,9 +262,9 @@ cache_build() {
save_default_cache_directories "$BUILD_DIR" "$CACHE_DIR"
else
header "Caching build"
save_custom_cache_directories "$BUILD_DIR" "$CACHE_DIR" $cache_directories
save_custom_cache_directories "$BUILD_DIR" "$CACHE_DIR" "$cache_directories"
fi
save_signature
save_signature "$CACHE_DIR"
}
cache_build | output "$LOG_FILE"
......@@ -259,14 +286,19 @@ summarize_build() {
fi
mmeasure 'modules.size' "$(measure_size)"
meta_set "node-modules-size" "$(measure_size)"
}
install_plugin $BP_DIR $BUILD_DIR
install_plugin "$BP_DIR" "$BUILD_DIR"
header "Build succeeded!" | output "$LOG_FILE"
mcount "compile"
summarize_build | output "$LOG_FILE"
meta_set "node-build-success" "true"
meta_time "build-time" "$build_start_time"
warn_no_start "$LOG_FILE"
warn_no_start "$BUILD_DIR"
warn_unmet_dep "$LOG_FILE"
warn_old_npm_lockfile $NPM_LOCK
log_meta_data >> "$BUILDPACK_LOG_FILE"
......@@ -17,7 +17,7 @@ error() {
exit 1
}
if [ -f $1/package.json ]; then
if [ -f "$1/package.json" ]; then
echo 'Node.js'
exit 0
fi
......@@ -59,7 +59,7 @@ If you are trying to deploy a Node.js application, ensure that this
file is present at the top level directory. This directory has the
following files:
$(ls -1p $1)
$(ls -1p "$1")
If you are trying to deploy an application written in another
language, you need to change the list of buildpacks set on your
......
#!/usr/bin/env bash
# bin/test-compile <build-dir> <cache-dir> <env-dir>
BP_DIR=$(cd "$(dirname ${0:-})"; cd ..; pwd)
### Configure environment
source $BP_DIR/lib/environment.sh
set -o errexit # always exit on error
set -o pipefail # don't ignore exit codes when piping output
### Configure directories
BP_DIR=$(cd "$(dirname "${0:-}")" || exit; cd ..; pwd)
### Load dependencies
# shellcheck source=lib/environment.sh
source "$BP_DIR/lib/environment.sh"
### Set up test Node environment
export NPM_CONFIG_PRODUCTION=${NPM_CONFIG_PRODUCTION:-false}
export NODE_ENV=${NODE_ENV:-test}
"$BP_DIR/bin/compile" "$1" "$2" "$3"
### Compile the app
"$BP_DIR/bin/compile" "$1" "$2" "$3"
write_ci_profile "$BP_DIR" "$1"
#!/usr/bin/env bash
export PATH="$HOME/.heroku/node/bin:$HOME/.heroku/yarn/bin:$PATH:$HOME/bin:$HOME/node_modules/.bin"
export NODE_HOME="$HOME/.heroku/node"
export NODE_ENV=${NODE_ENV:-test}
package main
import (
"encoding/xml"
"errors"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
"regexp"
"runtime"
"sort"
"strings"
"time"
"github.com/Masterminds/semver"
)
type result struct {
Name string `xml:"Name"`
KeyCount int `xml:"KeyCount"`
MaxKeys int `xml:"MaxKeys"`
IsTruncated bool `xml:"IsTruncated"`
ContinuationToken string `xml:"ContinuationToken"`
NextContinuationToken string `xml:"NextContinuationToken"`
Prefix string `xml:"Prefix"`
Contents []s3Object `xml:"Contents"`
}
type s3Object struct {
Key string `xml:"Key"`
LastModified time.Time `xml:"LastModified"`
ETag string `xml:"ETag"`
Size int `xml:"Size"`
StorageClass string `xml:"StorageClass"`
}
type release struct {
binary string
stage string
platform string
url string
version *semver.Version
}
type matchResult struct {
versionRequirement string
release release
matched bool
}
func main() {
if len(os.Args) < 3 {
printUsage()
os.Exit(0)
}
binary := os.Args[1]
versionRequirement := os.Args[2]
if binary == "node" {
objects, err := listS3Objects("heroku-nodebin", "node")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
result, err := resolveNode(objects, getPlatform(), versionRequirement)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
if result.matched {
fmt.Printf("%s %s\n", result.release.version.String(), result.release.url)
} else {
fmt.Println("No result")
os.Exit(1)
}
} else if binary == "yarn" {
objects, err := listS3Objects("heroku-nodebin", "yarn")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
result, err := resolveYarn(objects, versionRequirement)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
if result.matched {
fmt.Printf("%s %s\n", result.release.version.String(), result.release.url)
} else {
fmt.Println("No result")
os.Exit(1)
}
}
}
func printUsage() {
fmt.Println("resolve-version binary version-requirement")
}
func getPlatform() string {
if runtime.GOOS == "darwin" {
return "darwin-x64"
}
return "linux-x64"
}
func resolveNode(objects []s3Object, platform string, versionRequirement string) (matchResult, error) {
releases := []release{}
staging := []release{}
for _, obj := range objects {
release, err := parseObject(obj.Key)
if err != nil {
continue
}
// ignore any releases that are not for the given platform
if release.platform != platform {
continue
}
if release.stage == "release" {
releases = append(releases, release)
} else {
staging = append(staging, release)
}
}
result, err := matchReleaseSemver(releases, versionRequirement)
if err != nil {
return matchResult{}, err
}
// In order to accommodate integrated testing of staged Node binaries before they are
// released broadly, there is a special case where:
//
// - if there is no match to a Node binary AND
// - an exact version of a binary in `node/staging` is present
//
// the staging binary is used
if !result.matched {
stagingResult := matchReleaseExact(staging, versionRequirement)
if stagingResult.matched {
return stagingResult, nil
}
}
return result, nil
}
func resolveYarn(objects []s3Object, versionRequirement string) (matchResult, error) {
releases := []release{}
for _, obj := range objects {
release, err := parseObject(obj.Key)
if err != nil {
continue
}
releases = append(releases, release)
}
return matchReleaseSemver(releases, versionRequirement)
}
func matchReleaseSemver(releases []release, versionRequirement string) (matchResult, error) {
rewrittenRequirement := rewriteRange(versionRequirement)
constraints, err := semver.NewConstraint(rewrittenRequirement)
if err != nil {
return matchResult{}, err
}
filtered := []release{}
for _, release := range releases {
if constraints.Check(release.version) {
filtered = append(filtered, release)
}
}
versions := make([]*semver.Version, len(filtered))
for i, rel := range filtered {
versions[i] = rel.version
}
coll := semver.Collection(versions)
sort.Sort(coll)
if len(coll) == 0 {
return matchResult{
versionRequirement: versionRequirement,
release: release{},
matched: false,
}, nil
}
resolvedVersion := coll[len(coll)-1]
for _, rel := range filtered {
if rel.version.Equal(resolvedVersion) {
return matchResult{
versionRequirement: versionRequirement,
release: rel,
matched: true,
}, nil
}
}
return matchResult{}, errors.New("Unknown error")
}
func matchReleaseExact(releases []release, version string) matchResult {
for _, release := range releases {
if release.version.String() == version {
return matchResult{
versionRequirement: version,
release: release,
matched: true,
}
}
}
return matchResult{
versionRequirement: version,
release: release{},
matched: false,
}
}
// Parses an S3 key into a struct of information about that release
// Example input: node/release/linux-x64/node-v6.2.2-linux-x64.tar.gz
func parseObject(key string) (release, error) {
nodeRegex := regexp.MustCompile("node\\/([^\\/]+)\\/([^\\/]+)\\/node-v([0-9]+\\.[0-9]+\\.[0-9]+)-([^.]*)(.*)\\.tar\\.gz")
yarnRegex := regexp.MustCompile("yarn\\/([^\\/]+)\\/yarn-v([0-9]+\\.[0-9]+\\.[0-9]+)\\.tar\\.gz")
if nodeRegex.MatchString(key) {
match := nodeRegex.FindStringSubmatch(key)
version, err := semver.NewVersion(match[3])
if err != nil {
return release{}, fmt.Errorf("Failed to parse version as semver:%s\n%s", match[3], err.Error())
}
return release{
binary: "node",
stage: match[1],
platform: match[2],
version: version,
url: fmt.Sprintf("https://s3.amazonaws.com/%s/node/%s/%s/node-v%s-%s.tar.gz", "heroku-nodebin", match[1], match[2], match[3], match[2]),
}, nil
}
if yarnRegex.MatchString(key) {
match := yarnRegex.FindStringSubmatch(key)
version, err := semver.NewVersion(match[2])
if err != nil {
return release{}, errors.New("Failed to parse version as semver")
}
return release{
binary: "yarn",
stage: match[1],
platform: "",
url: fmt.Sprintf("https://s3.amazonaws.com/heroku-nodebin/yarn/release/yarn-v%s.tar.gz", version),
version: version,
}, nil
}
return release{}, fmt.Errorf("Failed to parse key: %s", key)
}
// Wrapper around the S3 API for listing objects
// This maps directly to the API and parses the XML response but will not handle
// paging and offsets automatically
func fetchS3Result(bucketName string, options map[string]string) (result, error) {
var result result
v := url.Values{}
v.Set("list-type", "2")
for key, val := range options {
v.Set(key, val)
}
url := fmt.Sprintf("https://%s.s3.amazonaws.com?%s", bucketName, v.Encode())
resp, err := http.Get(url)
if err != nil {
return result, err
}
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return result, err
}
return result, xml.Unmarshal(body, &result)
}
// Query the S3 API for a list of all the objects in an S3 bucket with a
// given prefix. This will handle the inherent 1000 item limit and paging
// for you
func listS3Objects(bucketName string, prefix string) ([]s3Object, error) {
var out = []s3Object{}
var options = map[string]string{"prefix": prefix}
for {
result, err := fetchS3Result(bucketName, options)
if err != nil {
return nil, err
}
out = append(out, result.Contents...)
if !result.IsTruncated {
break
}
options["continuation-token"] = result.NextContinuationToken
}
return out, nil
}
// regex matching the semver version definitions
// Ex:
// v1.0.0
// 9
// 8.x
const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` +
`(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
`(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`
// regex matching the semver operators
const ops string = `=<|~>|!=|>|<|>=|=>|<=|\^|=|~`
// Masterminds/semver does not support constraints like: `>1 <2`, preferring
// `>1, <2` with a comma separator. This catches this particular case and
// rewrites it
func rewriteRange(c string) string {
constraintRangeRegex := regexp.MustCompile(fmt.Sprintf(
`^\s*(%s)(\s*%s)\s*(%s)(\s*%s)$`,
ops, cvRegex, ops, cvRegex,
))
ors := strings.Split(c, "||")
out := make([]string, len(ors))
for i, v := range ors {
m := constraintRangeRegex.FindStringSubmatch(v)
if m != nil {
out[i] = fmt.Sprintf("%s%s, %s%s", m[1], m[2], m[12], m[13])
} else {
out[i] = v
}
}
return strings.Join(out, `||`)
}
// +build integration
package main
import (
"regexp"
"testing"
"github.com/stretchr/testify/assert"
)
func TestListS3Objects(t *testing.T) {
// Node
objects, err := listS3Objects("heroku-nodebin", "node")
assert.Nil(t, err)
assert.NotEmpty(t, objects)
// every returned result starts with "node"
for _, obj := range objects {
assert.Regexp(t, regexp.MustCompile("^node"), obj.Key)
}
// every node object must parse as a valid release
for _, obj := range objects {
release, err := parseObject(obj.Key)
assert.Nil(t, err)
assert.Regexp(t, regexp.MustCompile("https:\\/\\/s3.amazonaws.com\\/heroku-nodebin"), release.url)
assert.Regexp(t, regexp.MustCompile("[0-9]+.[0-9]+.[0-9]+"), release.version.String())
}
// Yarn
objects, err = listS3Objects("heroku-nodebin", "yarn")
assert.Nil(t, err)
assert.NotEmpty(t, objects)
// every returned result starts with "yarn"
for _, obj := range objects {
assert.Regexp(t, regexp.MustCompile("^yarn"), obj.Key)
}
// every yarn object must parse as a valid release
for _, obj := range objects {
release, err := parseObject(obj.Key)
assert.Nil(t, err)
assert.Regexp(t, regexp.MustCompile("https:\\/\\/s3.amazonaws.com\\/heroku-nodebin"), release.url)
assert.Regexp(t, regexp.MustCompile("[0-9]+.[0-9]+.[0-9]+"), release.version.String())
}
}
package main
import (
"fmt"
"testing"
"time"
"github.com/Masterminds/semver"
"github.com/stretchr/testify/assert"
)
func TestParseObject(t *testing.T) {
release, err := parseObject("node/release/linux-x64/node-v6.2.2-linux-x64.tar.gz")
assert.Nil(t, err)
assert.Equal(t, release.binary, "node")
assert.Equal(t, release.stage, "release")
assert.Equal(t, release.platform, "linux-x64")
assert.Equal(t, release.version.String(), "6.2.2")
release, err = parseObject("node/release/darwin-x64/node-v8.14.1-darwin-x64.tar.gz")
assert.Nil(t, err)
assert.Equal(t, release.binary, "node")
assert.Equal(t, release.stage, "release")
assert.Equal(t, release.platform, "darwin-x64")
assert.Equal(t, release.version.String(), "8.14.1")
release, err = parseObject("node/staging/darwin-x64/node-v6.17.0-darwin-x64.tar.gz")
assert.Nil(t, err)
assert.Equal(t, release.binary, "node")
assert.Equal(t, release.stage, "staging")
assert.Equal(t, release.platform, "darwin-x64")
assert.Equal(t, release.version.String(), "6.17.0")
release, err = parseObject("yarn/release/yarn-v1.9.1.tar.gz")
assert.Nil(t, err)
assert.Equal(t, release.binary, "yarn")
assert.Equal(t, release.stage, "release")
assert.Equal(t, release.platform, "")
assert.Equal(t, release.version.String(), "1.9.1")
release, err = parseObject("something/weird")
assert.NotNil(t, err)
assert.Equal(t, err.Error(), "Failed to parse key: something/weird")
}
func genReleasesFromArray(versions []string) []release {
out := []release{}
for _, version := range versions {
out = append(out, release{
binary: "node",
stage: "release",
platform: "linux-x64",
url: "https://heroku.com",
version: semver.MustParse(version),
})
}
return out
}
func TestMatchReleaseExact(t *testing.T) {
releases := genReleasesFromArray([]string{"1.0.0", "1.0.1", "1.0.2"})
result := matchReleaseExact(releases, "1.0.1")
assert.True(t, result.matched)
assert.Equal(t, result.release.version.String(), "1.0.1")
result = matchReleaseExact(releases, "1.0.2")
assert.True(t, result.matched)
assert.Equal(t, result.release.version.String(), "1.0.2")
result = matchReleaseExact(releases, "1.0.3")
assert.False(t, result.matched)
assert.Equal(t, result.versionRequirement, "1.0.3")
}
type Case struct {
input string
output string
}
func TestMatchReleaseSemver(t *testing.T) {
// The current supported releases as of 9/16/2019
releases := genReleasesFromArray([]string{
"10.0.0", "10.1.0", "10.10.0", "10.11.0", "10.12.0", "10.13.0", "10.14.0", "10.14.1", "10.14.2", "10.15.0",
"10.15.1", "10.15.2", "10.15.3", "10.2.0", "10.2.1", "10.3.0", "10.4.0", "10.4.1", "10.5.0", "10.6.0",
"10.7.0", "10.8.0", "10.9.0", "11.0.0", "11.1.0", "11.10.0", "11.10.1", "11.11.0", "11.12.0", "11.13.0",
"11.14.0", "11.2.0", "11.3.0", "11.4.0", "11.5.0", "11.6.0", "11.7.0", "11.8.0", "11.9.0", "6.0.0",
"6.1.0", "6.10.0", "6.10.1", "6.10.2", "6.10.3", "6.11.0", "6.11.1", "6.11.2", "6.11.3", "6.11.4",
"6.11.5", "6.12.0", "6.12.1", "6.12.2", "6.12.3", "6.13.0", "6.13.1", "6.14.0", "6.14.1", "6.14.2",
"6.14.3", "6.14.4", "6.15.0", "6.15.1", "6.16.0", "6.17.0", "6.17.1", "6.2.0", "6.2.1", "6.2.2",
"6.3.0", "6.3.1", "6.4.0", "6.5.0", "6.6.0", "6.7.0", "6.8.0", "6.8.1", "6.9.0", "6.9.1", "6.9.2",
"6.9.3", "6.9.4", "6.9.5", "8.0.0", "8.1.0", "8.1.1", "8.1.2", "8.1.3", "8.1.4", "8.10.0", "8.11.0",
"8.11.1", "8.11.2", "8.11.3", "8.11.4", "8.12.0", "8.13.0", "8.14.0", "8.14.1", "8.15.0", "8.15.1",
"8.16.0", "8.2.0", "8.2.1", "8.3.0", "8.4.0", "8.5.0", "8.6.0", "8.7.0", "8.8.0", "8.8.1", "8.9.0",
"8.9.1", "8.9.2", "8.9.3", "8.9.4",
})
// Semver requirements pulled from real apps
cases := []Case{
Case{input: "10.x", output: "10.15.3"},
Case{input: "10.*", output: "10.15.3"},
Case{input: "10", output: "10.15.3"},
Case{input: "8.x", output: "8.16.0"},
Case{input: "^8.11.3", output: "8.16.0"},
Case{input: "~8.11.3", output: "8.11.4"},
Case{input: ">= 6.0.0", output: "11.14.0"},
Case{input: "^6.9.0 || ^8.9.0 || ^10.13.0", output: "10.15.3"},
Case{input: "6.* || 8.* || >= 10.*", output: "11.14.0"},
Case{input: ">= 6.11.1 <= 10", output: "10.15.3"},
// TODO: Masterminds/semver interprets this as `< 11.x`
// Case{input: ">=8.10 <11", output: "10.15.3"},
}
for _, c := range cases {
result, err := matchReleaseSemver(releases, c.input)
assert.Nil(t, err)
assert.True(t, result.matched)
assert.Equal(t, result.release.version.String(), c.output)
}
result, err := matchReleaseSemver(releases, "99.x")
assert.Nil(t, err)
assert.False(t, result.matched)
assert.Equal(t, result.versionRequirement, "99.x")
}
func genYarnS3ObjectList(versions []string) []s3Object {
out := []s3Object{}
for _, version := range versions {
out = append(out, s3Object{
Key: fmt.Sprintf("yarn/release/yarn-v%s.tar.gz", version),
LastModified: time.Time{},
ETag: "abcdef",
Size: 0,
StorageClass: "normal",
})
}
return out
}
func TestResolveYarn(t *testing.T) {
// yarn releases as of 4/18/2019
objects := genYarnS3ObjectList([]string{
"0.16.0", "0.16.1", "0.17.0", "0.17.10", "0.17.2", "0.17.3", "0.17.4", "0.17.5", "0.17.6",
"0.17.7", "0.17.8", "0.17.9", "0.18.0", "0.18.1", "0.18.2", "0.19.0", "0.19.1", "0.20.0",
"0.20.3", "0.20.4", "0.21.0", "0.21.1", "0.21.2", "0.21.3", "0.22.0", "0.23.0", "0.23.2",
"0.23.3", "0.23.4", "0.24.0", "0.24.1", "0.24.2", "0.24.3", "0.24.4", "0.24.5", "0.24.6",
"0.25.1", "0.25.2", "0.25.3", "0.25.4", "0.26.1", "0.27.0", "0.27.1", "0.27.2", "0.27.3",
"0.27.4", "0.27.5", "0.28.1", "0.28.4", "1.0.0", "1.0.1", "1.0.2", "1.1.0", "1.10.0",
"1.10.1", "1.11.0", "1.11.1", "1.12.0", "1.12.1", "1.12.3", "1.13.0", "1.14.0", "1.15.0",
"1.15.1", "1.15.2", "1.2.0", "1.2.1", "1.3.2", "1.4.0", "1.5.1", "1.6.0", "1.7.0", "1.8.0",
"1.9.1", "1.9.2", "1.9.4",
})
cases := []Case{
Case{input: "1.13.0", output: "1.13.0"},
Case{input: "1.15.2", output: "1.15.2"},
Case{input: "1.x", output: "1.15.2"},
Case{input: "*", output: "1.15.2"},
Case{input: "^1.12.1", output: "1.15.2"},
Case{input: "^1.9.4", output: "1.15.2"},
Case{input: ">= 1.0.0", output: "1.15.2"},
Case{input: "^1.0", output: "1.15.2"},
Case{input: "0.24.6 - 1.x", output: "1.15.2"},
Case{input: "1.*.*", output: "1.15.2"},
Case{input: "^v1.0.1", output: "1.15.2"},
Case{input: "1.13 - 1.16", output: "1.15.2"},
Case{input: ">=1.9.4 <2.0.0", output: "1.15.2"},
}
for _, c := range cases {
result, err := resolveYarn(objects, c.input)
if assert.Nil(t, err) {
assert.True(t, result.matched)
assert.Equal(t, result.release.version.String(), c.output)
assert.Equal(t, result.release.url, fmt.Sprintf("https://s3.amazonaws.com/heroku-nodebin/yarn/release/yarn-v%s.tar.gz", c.output))
}
}
}
func genNodeS3ObjectList(releaseVersions []string, stagingVersions []string, platform string) []s3Object {
out := []s3Object{}
for _, version := range releaseVersions {
out = append(out, s3Object{
Key: fmt.Sprintf("node/release/%s/node-v%s-%s.tar.gz", platform, version, platform),
LastModified: time.Time{},
ETag: "abcdef",
Size: 0,
StorageClass: "normal",
})
}
for _, version := range stagingVersions {
out = append(out, s3Object{
Key: fmt.Sprintf("node/staging/%s/node-v%s-%s.tar.gz", platform, version, platform),
LastModified: time.Time{},
ETag: "abcdef",
Size: 0,
StorageClass: "normal",
})
}
return out
}
func TestResolveNode(t *testing.T) {
releasedVersions := []string{
"10.0.0", "10.1.0", "10.10.0", "10.11.0", "10.12.0", "10.13.0", "10.14.0", "10.14.1", "10.14.2", "10.15.0",
"10.15.1", "10.15.2", "10.15.3", "10.2.0", "10.2.1", "10.3.0", "10.4.0", "10.4.1", "10.5.0", "10.6.0",
"10.7.0", "10.8.0", "10.9.0", "11.0.0", "11.1.0", "11.10.0", "11.10.1", "11.11.0", "11.12.0", "11.13.0",
"11.14.0", "11.2.0", "11.3.0", "11.4.0", "11.5.0", "11.6.0", "11.7.0", "11.8.0", "11.9.0", "6.0.0",
"6.1.0", "6.10.0", "6.10.1", "6.10.2", "6.10.3", "6.11.0", "6.11.1", "6.11.2", "6.11.3", "6.11.4",
"6.11.5", "6.12.0", "6.12.1", "6.12.2", "6.12.3", "6.13.0", "6.13.1", "6.14.0", "6.14.1", "6.14.2",
"6.14.3", "6.14.4", "6.15.0", "6.15.1", "6.16.0", "6.17.0", "6.17.1", "6.2.0", "6.2.1", "6.2.2",
"6.3.0", "6.3.1", "6.4.0", "6.5.0", "6.6.0", "6.7.0", "6.8.0", "6.8.1", "6.9.0", "6.9.1", "6.9.2",
"6.9.3", "6.9.4", "6.9.5", "8.0.0", "8.1.0", "8.1.1", "8.1.2", "8.1.3", "8.1.4", "8.10.0", "8.11.0",
"8.11.1", "8.11.2", "8.11.3", "8.11.4", "8.12.0", "8.13.0", "8.14.0", "8.14.1", "8.15.0", "8.15.1",
"8.16.0", "8.2.0", "8.2.1", "8.3.0", "8.4.0", "8.5.0", "8.6.0", "8.7.0", "8.8.0", "8.8.1", "8.9.0",
"8.9.1", "8.9.2", "8.9.3", "8.9.4",
}
objects := genNodeS3ObjectList(releasedVersions, []string{}, "linux-x64")
// Semver requirements pulled from real apps
cases := []Case{
Case{input: "10.x", output: "10.15.3"},
Case{input: "10.*", output: "10.15.3"},
Case{input: "10", output: "10.15.3"},
Case{input: "8.x", output: "8.16.0"},
Case{input: "^8.11.3", output: "8.16.0"},
Case{input: "~8.11.3", output: "8.11.4"},
Case{input: ">= 6.0.0", output: "11.14.0"},
Case{input: "^6.9.0 || ^8.9.0 || ^10.13.0", output: "10.15.3"},
Case{input: "6.* || 8.* || >= 10.*", output: "11.14.0"},
Case{input: ">= 6.11.1 <= 10", output: "10.15.3"},
// TODO: Masterminds/semver interprets this as `< 11.x`
// Case{input: ">=8.10 <11", output: "10.15.3"},
}
for _, c := range cases {
result, err := resolveNode(objects, "linux-x64", c.input)
if assert.Nil(t, err) {
assert.True(t, result.matched)
assert.Equal(t, result.release.version.String(), c.output)
}
}
for _, c := range cases {
result, err := resolveNode(objects, "darwin-x64", c.input)
if assert.Nil(t, err) {
assert.False(t, result.matched)
assert.Equal(t, result.versionRequirement, c.input)
}
}
}
func TestResolveNodeStaging(t *testing.T) {
releasedVersions := []string{
"10.0.0", "10.1.0", "10.10.0", "10.11.0", "10.12.0", "10.13.0", "10.14.0", "10.14.1", "10.14.2", "10.15.0",
"10.15.1", "10.15.2", "10.15.3", "10.2.0", "10.2.1", "10.3.0", "10.4.0", "10.4.1", "10.5.0", "10.6.0",
"10.7.0", "10.8.0", "10.9.0", "11.0.0", "11.1.0", "11.10.0", "11.10.1", "11.11.0", "11.12.0", "11.13.0",
"11.14.0", "11.2.0", "11.3.0", "11.4.0", "11.5.0", "11.6.0", "11.7.0", "11.8.0", "11.9.0", "6.0.0",
"6.1.0", "6.10.0", "6.10.1", "6.10.2", "6.10.3", "6.11.0", "6.11.1", "6.11.2", "6.11.3", "6.11.4",
"6.11.5", "6.12.0", "6.12.1", "6.12.2", "6.12.3", "6.13.0", "6.13.1", "6.14.0", "6.14.1", "6.14.2",
"6.14.3", "6.14.4", "6.15.0", "6.15.1", "6.16.0", "6.17.0", "6.17.1", "6.2.0", "6.2.1", "6.2.2",
"6.3.0", "6.3.1", "6.4.0", "6.5.0", "6.6.0", "6.7.0", "6.8.0", "6.8.1", "6.9.0", "6.9.1", "6.9.2",
"6.9.3", "6.9.4", "6.9.5", "8.0.0", "8.1.0", "8.1.1", "8.1.2", "8.1.3", "8.1.4", "8.10.0", "8.11.0",
"8.11.1", "8.11.2", "8.11.3", "8.11.4", "8.12.0", "8.13.0", "8.14.0", "8.14.1", "8.15.0", "8.15.1",
"8.16.0", "8.2.0", "8.2.1", "8.3.0", "8.4.0", "8.5.0", "8.6.0", "8.7.0", "8.8.0", "8.8.1", "8.9.0",
"8.9.1", "8.9.2", "8.9.3", "8.9.4",
}
platforms := []string{"linux-x64", "darwin-x64"}
for _, platform := range platforms {
// staging has a few versions that were already released, plus one (10.15.4) that has not been released yet
objects := genNodeS3ObjectList(releasedVersions, []string{"10.15.1", "10.15.2", "10.15.3", "10.15.4"}, platform)
result, err := resolveNode(objects, platform, "10.15.1")
if assert.Nil(t, err) {
assert.True(t, result.matched)
assert.Equal(t, result.release.version.String(), "10.15.1")
assert.Equal(t, result.versionRequirement, "10.15.1")
if platform == "linux-x64" {
assert.Equal(t, result.release.url, "https://s3.amazonaws.com/heroku-nodebin/node/release/linux-x64/node-v10.15.1-linux-x64.tar.gz")
} else {
assert.Equal(t, result.release.url, "https://s3.amazonaws.com/heroku-nodebin/node/release/darwin-x64/node-v10.15.1-darwin-x64.tar.gz")
}
}
result, err = resolveNode(objects, platform, "10.15.4")
if assert.Nil(t, err) {
assert.True(t, result.matched)
assert.Equal(t, result.release.version.String(), "10.15.4")
assert.Equal(t, result.versionRequirement, "10.15.4")
if platform == "linux-x64" {
assert.Equal(t, result.release.url, "https://s3.amazonaws.com/heroku-nodebin/node/staging/linux-x64/node-v10.15.4-linux-x64.tar.gz")
} else {
assert.Equal(t, result.release.url, "https://s3.amazonaws.com/heroku-nodebin/node/staging/darwin-x64/node-v10.15.4-darwin-x64.tar.gz")
}
}
result, err = resolveNode(objects, platform, "10.15.5")
if assert.Nil(t, err) {
assert.False(t, result.matched)
assert.Equal(t, result.versionRequirement, "10.15.5")
}
}
}
func TestRewriteRange(t *testing.T) {
cases := []Case{
Case{input: "10.x", output: "10.x"},
Case{input: "10.*", output: "10.*"},
Case{input: "10", output: "10"},
Case{input: "8.x", output: "8.x"},
Case{input: "^8.11.3", output: "^8.11.3"},
Case{input: "~8.11.3", output: "~8.11.3"},
Case{input: ">= 6.0.0", output: ">= 6.0.0"},
Case{input: "^6.9.0 || ^8.9.0 || ^10.13.0", output: "^6.9.0 || ^8.9.0 || ^10.13.0"},
Case{input: "6.* || 8.* || >= 10.*", output: "6.* || 8.* || >= 10.*"},
Case{input: ">= 6.11.1 <= 10", output: ">= 6.11.1, <= 10"},
Case{input: ">=8.10 <11", output: ">=8.10, <11"},
Case{input: ">1<2", output: ">1, <2"},
Case{input: ">1<", output: ">1<"},
}
for _, c := range cases {
out := rewriteRange(c.input)
assert.Equal(t, c.output, out)
}
}
......@@ -2,7 +2,7 @@
[ "$CI" != "true" ] && echo "Not running on CI!" && exit 1
git config --global user.email ${HEROKU_API_USER:-"buildpack@example.com"}
git config --global user.email "${HEROKU_API_USER:-"buildpack@example.com"}"
git config --global user.name 'BuildpackTester'
cat <<EOF >> ~/.ssh/config
......
......@@ -26,14 +26,16 @@ if [ -z "$HEROKU_API_KEY" ]; then
fi
if [ -n "$CIRCLE_BRANCH" ]; then
export HATCHET_BUILDPACK_BRANCH="$CIRCLE_BRANCH"
HATCHET_BUILDPACK_BRANCH="$CIRCLE_BRANCH"
elif [ -n "$TRAVIS_PULL_REQUEST_BRANCH" ]; then
export IS_RUNNING_ON_TRAVIS=true
export HATCHET_BUILDPACK_BRANCH="$TRAVIS_PULL_REQUEST_BRANCH"
HATCHET_BUILDPACK_BRANCH="$TRAVIS_PULL_REQUEST_BRANCH"
else
export HATCHET_BUILDPACK_BRANCH=$(git name-rev HEAD 2> /dev/null | sed 's#HEAD\ \(.*\)#\1#' | sed 's#tags\/##')
HATCHET_BUILDPACK_BRANCH=$(git name-rev HEAD 2> /dev/null | sed 's#HEAD\ \(.*\)#\1#' | sed 's#tags\/##')
fi
export HATCHET_BUILDPACK_BRANCH
gem install bundler
bundle install
......
......@@ -7,7 +7,7 @@ BP_NAME=${1:-"heroku/nodejs"}
curVersion=$(heroku buildpacks:versions "$BP_NAME" | awk 'FNR == 3 { print $1 }')
newVersion="v$((curVersion + 1))"
read -p "Deploy as version: $newVersion [y/n]? " choice
read -r -p "Deploy as version: $newVersion [y/n]? " choice
case "$choice" in
y|Y ) echo "";;
n|N ) exit 0;;
......
module github.com/heroku/heroku-buildpack-nodejs
go 1.12
require (
github.com/Masterminds/semver v1.4.2
github.com/stretchr/testify v1.3.0
)
github.com/Masterminds/semver v1.4.2 h1:WBLTQ37jOCzSLtXNdoo8bNM8876KhNqOKvrlGITgsTc=
github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
#!/usr/bin/env bash
RESOLVE="$BP_DIR/vendor/resolve-version-$(get_os)"
install_yarn() {
local dir="$1"
local version=${2:-1.x}
local number
local url
local platform="$3"
local number url code nodebin_result resolve_result
echo "Resolving yarn version $version..."
if ! read number url < <(curl --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/yarn/$platform/latest.txt"); then
fail_bin_install yarn $version;
nodebin_result=$(curl --fail --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/yarn/$platform/latest.txt" || echo "failed")
resolve_result=$($RESOLVE yarn "$version" || echo "failed")
if [[ "$nodebin_result" == "failed" ]]; then
fail_bin_install yarn "$version" "$platform"
fi
read -r number url < <(echo "$nodebin_result")
# log out whether the new logic matches the old logic
if [[ "$nodebin_result" != "$resolve_result" ]]; then
meta_set "resolve-matches-nodebin-yarn" "false"
else
meta_set "resolve-matches-nodebin-yarn" "true"
fi
# log out when the new logic fails
if [[ "$resolve_result" == "failed" ]]; then
meta_set "resolve-failed-yarn" "true"
fi
echo "Downloading and installing yarn ($number)..."
local code=$(curl "$url" -L --silent --fail --retry 5 --retry-max-time 15 -o /tmp/yarn.tar.gz --write-out "%{http_code}")
code=$(curl "$url" -L --silent --fail --retry 5 --retry-max-time 15 -o /tmp/yarn.tar.gz --write-out "%{http_code}")
if [ "$code" != "200" ]; then
echo "Unable to download yarn: $code" && false
fi
rm -rf $dir
rm -rf "$dir"
mkdir -p "$dir"
# https://github.com/yarnpkg/yarn/issues/770
if tar --version | grep -q 'gnu'; then
......@@ -22,54 +43,58 @@ install_yarn() {
else
tar xzf /tmp/yarn.tar.gz -C "$dir" --strip 1
fi
chmod +x $dir/bin/*
chmod +x "$dir"/bin/*
echo "Installed yarn $(yarn --version)"
}
install_nodejs() {
local version=${1:-10.x}
local dir="${2:?}"
local platform="$3"
local code os cpu nodebin_result resolve_result
os=$(get_os)
cpu=$(get_cpu)
echo "Resolving node version $version..."
if ! read number url < <(curl --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/node/$platform/latest.txt"); then
fail_bin_install node $version;
fi
nodebin_result=$(curl --silent --fail --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/node/$platform/latest.txt" || echo "failed")
resolve_result=$($RESOLVE node "$version" || echo "failed")
echo "Downloading and installing node $number..."
local code=$(curl "$url" -L --silent --fail --retry 5 --retry-max-time 15 -o /tmp/node.tar.gz --write-out "%{http_code}")
if [ "$code" != "200" ]; then
echo "Unable to download node: $code" && false
read -r number url < <(echo "$nodebin_result")
if [[ "$nodebin_result" == "failed" ]]; then
fail_bin_install node "$version" "$platform"
fi
tar xzf /tmp/node.tar.gz -C /tmp
rm -rf "$dir"/*
mv /tmp/node-v$number-$os-$cpu/* $dir
chmod +x $dir/bin/*
}
install_iojs() {
local version="$1"
local dir="$2"
# log out whether the new logic matches the old logic
if [[ "$nodebin_result" != "$resolve_result" ]]; then
meta_set "resolve-matches-nodebin-node" "false"
else
meta_set "resolve-matches-nodebin-node" "true"
fi
echo "Resolving iojs version ${version:-(latest stable)}..."
if ! read number url < <(curl --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/iojs/$platform/latest.txt"); then
fail_bin_install iojs $version;
# log out when the new logic fails
if [[ "$resolve_result" == "failed" ]]; then
meta_set "resolve-failed-node" "true"
fi
echo "Downloading and installing iojs $number..."
local code=$(curl "$url" --silent --fail --retry 5 --retry-max-time 15 -o /tmp/iojs.tar.gz --write-out "%{http_code}")
echo "Downloading and installing node $number..."
code=$(curl "$url" -L --silent --fail --retry 5 --retry-max-time 15 -o /tmp/node.tar.gz --write-out "%{http_code}")
if [ "$code" != "200" ]; then
echo "Unable to download iojs: $code" && false
echo "Unable to download node: $code" && false
fi
tar xzf /tmp/iojs.tar.gz -C /tmp
mv /tmp/iojs-v$number-$os-$cpu/* $dir
chmod +x $dir/bin/*
tar xzf /tmp/node.tar.gz -C /tmp
rm -rf "${dir:?}"/*
mv /tmp/node-v"$number"-"$os"-"$cpu"/* "$dir"
chmod +x "$dir"/bin/*
}
install_npm() {
local npm_version
local version="$1"
local dir="$2"
local npm_lock="$3"
local npm_version="$(npm --version)"
npm_version="$(npm --version)"
# If the user has not specified a version of npm, but has an npm lockfile
# upgrade them to npm 5.x if a suitable version was not installed with Node
......
# variable shared by this whole module
BUILD_DATA_FILE=""
bd_create() {
local cache_dir="$1"
BUILD_DATA_FILE="$cache_dir/build-data/node"
kv_create $BUILD_DATA_FILE
}
bd_get() {
kv_get $BUILD_DATA_FILE "$1"
}
bd_set() {
kv_set $BUILD_DATA_FILE "$1" "$2"
}
log_build_data() {
# print all values on one line in logfmt format
# https://brandur.org/logfmt
echo $(kv_list $BUILD_DATA_FILE)
}
#!/usr/bin/env bash
log_initial_state() {
meta_set "buildpack" "nodejs"
if "$YARN"; then
meta_set "node-package-manager" "yarn"
meta_set "has-node-lock-file" "true"
else
meta_set "node-package-manager" "npm"
meta_set "has-node-lock-file" "$NPM_LOCK"
fi
meta_set "stack" "$STACK"
}
generate_uuids() {
# generate a unique id for each build
meta_set "build-uuid" "$(uuid)"
# propagate an app-uuid forward unless the cache is cleared
if [[ -n "$(meta_prev_get "app-uuid")" ]]; then
meta_set "app-uuid" "$(meta_prev_get "app-uuid")"
else
meta_set "app-uuid" "$(uuid)"
fi
}
log_build_script_opt_in() {
local opted_in="$1"
local build_dir="$2"
local has_build_script has_heroku_build_script
has_build_script=$(read_json "$build_dir/package.json" ".scripts.build")
has_heroku_build_script=$(read_json "$build_dir/package.json" ".scripts[\"heroku-postbuild\"]")
# if this app will be affected by the change
if [[ -z "$has_heroku_build_script" ]] && [[ -n "$has_build_script" ]]; then
mcount "affected-by-build-change"
if [[ "$opted_in" = "true" ]]; then
mcount "affected-by-build-change-opted-in"
meta_set "affected-but-opted-in" "true"
else
meta_set "affected-but-opted-in" "false"
fi
fi
if [[ "$opted_in" = true ]]; then
meta_set "build-script-opt-in" "true"
else
meta_set "build-script-opt-in" "false"
fi
}
\ No newline at end of file
source $BP_DIR/lib/binaries.sh
#!/usr/bin/env bash
create_signature() {
echo "v2; ${STACK}; $(node --version); $(npm --version); $(yarn --version 2>/dev/null || true); ${PREBUILD}"
}
save_signature() {
create_signature > $CACHE_DIR/node/signature
local cache_dir="$1"
create_signature > "$cache_dir/node/signature"
}
load_signature() {
if test -f $CACHE_DIR/node/signature; then
cat $CACHE_DIR/node/signature
local cache_dir="$1"
if test -f "$cache_dir/node/signature"; then
cat "$cache_dir/node/signature"
else
echo ""
fi
}
get_cache_status() {
local cache_dir="$1"
if ! ${NODE_MODULES_CACHE:-true}; then
echo "disabled"
elif ! test -d "${CACHE_DIR}/node/"; then
elif ! test -d "$cache_dir/node/"; then
echo "not-found"
elif [ "$(create_signature)" != "$(load_signature)" ]; then
elif [ "$(create_signature)" != "$(load_signature "$cache_dir")" ]; then
echo "new-signature"
else
echo "valid"
......@@ -29,8 +32,10 @@ get_cache_status() {
}
get_cache_directories() {
local dirs1=$(read_json "$BUILD_DIR/package.json" ".cacheDirectories | .[]?")
local dirs2=$(read_json "$BUILD_DIR/package.json" ".cache_directories | .[]?")
local build_dir="$1"
local dirs1 dirs2
dirs1=$(read_json "$build_dir/package.json" ".cacheDirectories | .[]?")
dirs2=$(read_json "$build_dir/package.json" ".cache_directories | .[]?")
if [ -n "$dirs1" ]; then
echo "$dirs1"
......@@ -61,9 +66,11 @@ restore_default_cache_directories() {
}
restore_custom_cache_directories() {
local cache_directories
local build_dir=${1:-}
local cache_dir=${2:-}
local cache_directories=("${@:3}")
# Parse the input string with multiple lines: "a\nb\nc" into an array
mapfile -t cache_directories <<< "$3"
echo "Loading ${#cache_directories[@]} from cacheDirectories (package.json):"
......@@ -83,9 +90,10 @@ restore_custom_cache_directories() {
}
clear_cache() {
rm -rf $CACHE_DIR/node
mkdir -p $CACHE_DIR/node
mkdir -p $CACHE_DIR/node/cache
local cache_dir="$1"
rm -rf "$cache_dir/node"
mkdir -p "$cache_dir/node"
mkdir -p "$cache_dir/node/cache"
}
save_default_cache_directories() {
......@@ -106,16 +114,21 @@ save_default_cache_directories() {
# bower_components
if [[ -e "$build_dir/bower_components" ]]; then
mcount "cache.saved-bower-components"
meta_set "cached-bower-components" "true"
echo "- bower_components"
mkdir -p "$cache_dir/node/cache/bower_components"
cp -a "$build_dir/bower_components" "$(dirname "$cache_dir/node/cache/bower_components")"
fi
meta_set "node-custom-cache-dirs" "false"
}
save_custom_cache_directories() {
local cache_directories
local build_dir=${1:-}
local cache_dir=${2:-}
local cache_directories=("${@:3}")
# Parse the input string with multiple lines: "a\nb\nc" into an array
mapfile -t cache_directories <<< "$3"
echo "Saving ${#cache_directories[@]} cacheDirectories (package.json):"
......@@ -128,4 +141,6 @@ save_custom_cache_directories() {
echo "- $cachepath (nothing to cache)"
fi
done
meta_set "node-custom-cache-dirs" "true"
}
#!/usr/bin/env bash
measure_size() {
(du -s node_modules 2>/dev/null || echo 0) | awk '{print $1}'
}
......@@ -5,7 +7,7 @@ measure_size() {
list_dependencies() {
local build_dir="$1"
cd "$build_dir"
cd "$build_dir" || return
if $YARN; then
echo ""
(yarn list --depth=0 || true) 2>/dev/null
......@@ -16,12 +18,21 @@ list_dependencies() {
}
run_if_present() {
local script_name=${1:-}
local has_script=$(read_json "$BUILD_DIR/package.json" ".scripts[\"$script_name\"]")
if [ -n "$has_script" ]; then
local build_dir=${1:-}
local script_name=${2:-}
local has_script_name
local script
has_script_name=$(has_script "$build_dir/package.json" "$script_name")
script=$(read_json "$build_dir/package.json" ".scripts[\"$script_name\"]")
if [[ "$has_script_name" == "true" ]]; then
if $YARN; then
echo "Running $script_name (yarn)"
monitor "$script_name" yarn run "$script_name"
# yarn will throw an error if the script is an empty string, so check for this case
if [[ -n "$script" ]]; then
monitor "$script_name" yarn run "$script_name"
fi
else
echo "Running $script_name"
monitor "$script_name" npm run "$script_name" --if-present
......@@ -29,88 +40,46 @@ run_if_present() {
fi
}
run_build_script() {
local has_build_script=$(read_json "$BUILD_DIR/package.json" ".scripts.build")
local has_heroku_build_script=$(read_json "$BUILD_DIR/package.json" ".scripts[\"heroku-postbuild\"]")
run_prebuild_script() {
local build_dir=${1:-}
local has_heroku_prebuild_script
if [[ -n "$has_heroku_build_script" ]] && [[ -n "$has_build_script" ]]; then
echo "Detected both 'build' and 'heroku-postbuild' scripts"
mcount "scripts.heroku-postbuild-and-build"
run_if_present 'heroku-postbuild'
elif [[ -n "$has_heroku_build_script" ]]; then
mcount "scripts.heroku-postbuild"
run_if_present 'heroku-postbuild'
elif [[ -n "$has_build_script" ]]; then
mcount "scripts.build"
run_if_present 'build'
fi
}
has_heroku_prebuild_script=$(has_script "$build_dir/package.json" "heroku-prebuild")
warn_build_script_behavior_opt_in() {
local opted_in="$1"
if [[ "$opted_in" = true ]]; then
header "Opting in to new default build script behavior"
echo "You have set \"heroku-run-build-script\" = true in your package.json"
echo ""
echo "- If a \"build\" script is defined in package.json it will be executed by default"
echo "- The \"heroku-postbuild\" script will be executed instead if present"
if [[ "$has_heroku_prebuild_script" == "true" ]]; then
mcount "script.heroku-prebuild"
header "Prebuild"
run_if_present "$build_dir" 'heroku-prebuild'
fi
}
log_build_scripts() {
local build=$(read_json "$BUILD_DIR/package.json" ".scripts[\"build\"]")
local heroku_prebuild=$(read_json "$BUILD_DIR/package.json" ".scripts[\"heroku-prebuild\"]")
local heroku_postbuild=$(read_json "$BUILD_DIR/package.json" ".scripts[\"heroku-postbuild\"]")
local postinstall=$(read_json "$BUILD_DIR/package.json" ".scripts[\"heroku-postbuild\"]")
if [ -n "$build" ]; then
mcount "scripts.build"
if [ -z "$heroku_postbuild" ]; then
mcount "scripts.build-without-heroku-postbuild"
fi
if [ -z "$postinstall" ]; then
mcount "scripts.build-without-postinstall"
fi
if [ -z "$postinstall" ] && [ -z "$heroku_postbuild" ]; then
mcount "scripts.build-without-other-hooks"
fi
fi
if [ -n "$postinstall" ]; then
mcount "scripts.postinstall"
if [ "$postinstall" == "npm run build" ] ||
[ "$postinstall" == "yarn run build" ] ||
[ "$postinstall" == "yarn build" ]; then
mcount "scripts.postinstall-is-npm-build"
fi
fi
run_build_script() {
local build_dir=${1:-}
local has_build_script has_heroku_build_script
if [ -n "$heroku_prebuild" ]; then
mcount "scripts.heroku-prebuild"
fi
has_build_script=$(has_script "$build_dir/package.json" "build")
has_heroku_build_script=$(has_script "$build_dir/package.json" "heroku-postbuild")
if [ -n "$heroku_postbuild" ]; then
if [[ "$has_heroku_build_script" == "true" ]] && [[ "$has_build_script" == "true" ]]; then
echo "Detected both \"build\" and \"heroku-postbuild\" scripts"
mcount "scripts.heroku-postbuild-and-build"
run_if_present "$build_dir" 'heroku-postbuild'
elif [[ "$has_heroku_build_script" == "true" ]]; then
mcount "scripts.heroku-postbuild"
if [ "$heroku_postbuild" == "npm run build" ] ||
[ "$heroku_postbuild" == "yarn run build" ] ||
[ "$heroku_postbuild" == "yarn build" ]; then
mcount "scripts.heroku-postbuild-is-npm-build"
fi
run_if_present "$build_dir" 'heroku-postbuild'
elif [[ "$has_build_script" == "true" ]]; then
mcount "scripts.build"
run_if_present "$build_dir" 'build'
fi
}
if [ -n "$heroku_postbuild" ] && [ -n "$build" ]; then
mcount "scripts.build-and-heroku-postbuild"
log_build_scripts() {
local build_dir=${1:-}
if [ "$heroku_postbuild" != "$build" ]; then
mcount "scripts.different-build-and-heroku-postbuild"
fi
fi
meta_set "build-script" "$(read_json "$build_dir/package.json" ".scripts[\"build\"]")"
meta_set "postinstall-script" "$(read_json "$build_dir/package.json" ".scripts[\"postinstall\"]")"
meta_set "heroku-prebuild-script" "$(read_json "$build_dir/package.json" ".scripts[\"heroku-prebuild\"]")"
meta_set "heroku-postbuild-script" "$(read_json "$build_dir/package.json" ".scripts[\"heroku-postbuild\"]")"
}
yarn_node_modules() {
......@@ -118,8 +87,8 @@ yarn_node_modules() {
local production=${YARN_PRODUCTION:-false}
echo "Installing node modules (yarn.lock)"
cd "$build_dir"
monitor "yarn-install" yarn install --production=$production --frozen-lockfile --ignore-engines 2>&1
cd "$build_dir" || return
monitor "yarn-install" yarn install --production="$production" --frozen-lockfile --ignore-engines 2>&1
}
yarn_prune_devdependencies() {
......@@ -127,16 +96,20 @@ yarn_prune_devdependencies() {
if [ "$NODE_ENV" == "test" ]; then
echo "Skipping because NODE_ENV is 'test'"
meta_set "skipped-prune" "true"
return 0
elif [ "$NODE_ENV" != "production" ]; then
echo "Skipping because NODE_ENV is not 'production'"
meta_set "skipped-prune" "true"
return 0
elif [ -n "$YARN_PRODUCTION" ]; then
echo "Skipping because YARN_PRODUCTION is '$YARN_PRODUCTION'"
meta_set "skipped-prune" "true"
return 0
else
cd "$build_dir"
cd "$build_dir" || return
monitor "yarn-prune" yarn install --frozen-lockfile --ignore-engines --ignore-scripts --prefer-offline 2>&1
meta_set "skipped-prune" "false"
fi
}
......@@ -144,17 +117,17 @@ npm_node_modules() {
local build_dir=${1:-}
local production=${NPM_CONFIG_PRODUCTION:-false}
if [ -e $build_dir/package.json ]; then
cd $build_dir
if [ -e "$build_dir/package.json" ]; then
cd "$build_dir" || return
if [ -e $build_dir/package-lock.json ]; then
if [ -e "$build_dir/package-lock.json" ]; then
echo "Installing node modules (package.json + package-lock)"
elif [ -e $build_dir/npm-shrinkwrap.json ]; then
elif [ -e "$build_dir/npm-shrinkwrap.json" ]; then
echo "Installing node modules (package.json + shrinkwrap)"
else
echo "Installing node modules (package.json)"
fi
monitor "npm-install" npm install --production=$production --unsafe-perm --userconfig $build_dir/.npmrc 2>&1
monitor "npm-install" npm install --production="$production" --unsafe-perm --userconfig "$build_dir/.npmrc" 2>&1
else
echo "Skipping (no package.json)"
fi
......@@ -164,33 +137,38 @@ npm_rebuild() {
local build_dir=${1:-}
local production=${NPM_CONFIG_PRODUCTION:-false}
if [ -e $build_dir/package.json ]; then
cd $build_dir
if [ -e "$build_dir/package.json" ]; then
cd "$build_dir" || return
echo "Rebuilding any native modules"
npm rebuild 2>&1
if [ -e $build_dir/npm-shrinkwrap.json ]; then
if [ -e "$build_dir/npm-shrinkwrap.json" ]; then
echo "Installing any new modules (package.json + shrinkwrap)"
else
echo "Installing any new modules (package.json)"
fi
monitor "npm-rebuild" npm install --production=$production --unsafe-perm --userconfig $build_dir/.npmrc 2>&1
monitor "npm-rebuild" npm install --production="$production" --unsafe-perm --userconfig "$build_dir/.npmrc" 2>&1
else
echo "Skipping (no package.json)"
fi
}
npm_prune_devdependencies() {
local npm_version
local build_dir=${1:-}
local npm_version=$(npm --version)
npm_version=$(npm --version)
if [ "$NODE_ENV" == "test" ]; then
echo "Skipping because NODE_ENV is 'test'"
meta_set "skipped-prune" "true"
return 0
elif [ "$NODE_ENV" != "production" ]; then
echo "Skipping because NODE_ENV is not 'production'"
meta_set "skipped-prune" "true"
return 0
elif [ -n "$NPM_CONFIG_PRODUCTION" ]; then
echo "Skipping because NPM_CONFIG_PRODUCTION is '$NPM_CONFIG_PRODUCTION'"
meta_set "skipped-prune" "true"
return 0
elif [ "$npm_version" == "5.3.0" ]; then
mcount "skip-prune-issue-npm-5.3.0"
......@@ -199,6 +177,7 @@ npm_prune_devdependencies() {
echo ""
echo "You can silence this warning by updating to at least npm 5.7.1 in your package.json"
echo "https://devcenter.heroku.com/articles/nodejs-support#specifying-an-npm-version"
meta_set "skipped-prune" "true"
return 0
elif [ "$npm_version" == "5.6.0" ] ||
[ "$npm_version" == "5.5.1" ] ||
......@@ -213,9 +192,11 @@ npm_prune_devdependencies() {
echo ""
echo "You can silence this warning by updating to at least npm 5.7.1 in your package.json"
echo "https://devcenter.heroku.com/articles/nodejs-support#specifying-an-npm-version"
meta_set "skipped-prune" "true"
return 0
else
cd "$build_dir"
monitor "npm-prune" npm prune --userconfig $build_dir/.npmrc 2>&1
cd "$build_dir" || return
monitor "npm-prune" npm prune --userconfig "$build_dir/.npmrc" 2>&1
meta_set "skipped-prune" "false"
fi
}
#!/usr/bin/env bash
get_os() {
uname | tr A-Z a-z
uname | tr '[:upper:]' '[:lower:]'
}
get_cpu() {
......@@ -10,10 +12,11 @@ get_cpu() {
fi
}
os=$(get_os)
cpu=$(get_cpu)
platform="$os-$cpu"
export JQ="$BP_DIR/vendor/jq-$os"
get_platform() {
os=$(get_os)
cpu=$(get_cpu)
echo "$os-$cpu"
}
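# Illustrative only: on a typical Heroku dyno these helpers resolve to
# something like the following (exact values depend on the stack image):
#   get_os        # => linux
#   get_cpu       # => x64
#   get_platform  # => linux-x64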
create_default_env() {
export NPM_CONFIG_LOGLEVEL=${NPM_CONFIG_LOGLEVEL:-error}
......@@ -54,13 +57,15 @@ export_env_dir() {
if [ -d "$env_dir" ]; then
local whitelist_regex=${2:-''}
local blacklist_regex=${3:-'^(PATH|GIT_DIR|CPATH|CPPATH|LD_PRELOAD|LIBRARY_PATH|LANG|BUILD_DIR)$'}
# shellcheck disable=SC2164
pushd "$env_dir" >/dev/null
for e in *; do
[ -e "$e" ] || continue
echo "$e" | grep -E "$whitelist_regex" | grep -qvE "$blacklist_regex" &&
export "$e=$(cat $e)"
export "$e=$(cat "$e")"
:
done
# shellcheck disable=SC2164
popd >/dev/null
fi
}
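# A minimal sketch of how export_env_dir behaves, assuming an ENV_DIR that
# contains the (hypothetical) files NPM_TOKEN and PATH:
#   export_env_dir "$ENV_DIR"
#   # NPM_TOKEN is exported with the file's contents as its value;
#   # PATH is skipped because it matches the default blacklist regex.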
......@@ -68,15 +73,15 @@ export_env_dir() {
write_profile() {
local bp_dir="$1"
local build_dir="$2"
mkdir -p $build_dir/.profile.d
cp $bp_dir/profile/* $build_dir/.profile.d/
mkdir -p "$build_dir/.profile.d"
cp "$bp_dir"/profile/* "$build_dir/.profile.d/"
}
write_ci_profile() {
local bp_dir="$1"
local build_dir="$2"
write_profile "$1" "$2"
cp $bp_dir/ci-profile/* $build_dir/.profile.d/
cp "$bp_dir"/ci-profile/* "$build_dir/.profile.d/"
}
write_export() {
......@@ -86,8 +91,8 @@ write_export() {
# only write the export script if the buildpack directory is writable.
# this may occur in situations outside of Heroku, such as running the
# buildpacks locally.
if [ -w ${bp_dir} ]; then
echo "export PATH=\"$build_dir/.heroku/node/bin:$build_dir/.heroku/yarn/bin:\$PATH:$build_dir/node_modules/.bin\"" > $bp_dir/export
echo "export NODE_HOME=\"$build_dir/.heroku/node\"" >> $bp_dir/export
if [ -w "$bp_dir" ]; then
echo "export PATH=\"$build_dir/.heroku/node/bin:$build_dir/.heroku/yarn/bin:\$PATH:$build_dir/node_modules/.bin\"" > "$bp_dir/export"
echo "export NODE_HOME=\"$build_dir/.heroku/node\"" >> "$bp_dir/export"
fi
}
#!/usr/bin/env bash
# This module is designed to be able to roll out experiments to a
# random segment of users for A/B testing. This takes as input a
# list of experiments along with % chance they will be enabled,
# decides which to enable, and persists these decisions into the
# application cache.
#
# This module takes in no outside data, so it is limited in its
# uses. While an experiment can be persisted between builds for the
# same app, it cannot be consistent for a given user / team. Even
# different PR apps will be decided independently.
#
# This means that this should not be used for changing the build
# behavior of the buildpack. Builds should always work consistently
# no matter what experiments are turned on or off.
#
# Where this module can be useful is when deciding between two
# functionally equivalent behaviors with different performance trade-offs,
# or testing the efficacy of different messaging.
#
# Examples:
# testing two different caching strategies against each other
# showing guidance on a particular type of failure
#
# It is expected that these experiments will be short-lived
#
# Schema
#
# This module expects a "schema" file as input. This is used to
# make sure that all current experiments are documented in one
# place. The file is a list of key=value pairs on individual
# lines.
#
# A special "#version" key is expected; it can be used to invalidate
# any existing experiments.
#
# The key is the name, and the value is an integer between 0 and
# 100 inclusive that represents the likelihood that the experiment
# will be turned on for any given app.
#
# Example:
# ```
# #version=1
# always-on=100 // this will always be turned on, not super useful
# ab-test=50 // this will be split 50/50
# small-test=5 // this will be turned on for 5% of apps
# ```
#
# See tests/unit-fixtures/experiments/experiments-v1 for an example
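#
# Usage sketch (illustrative only; the schema path below is hypothetical
# and not part of this buildpack):
#
#   experiments_init "nodejs" "$CACHE_DIR" "$BP_DIR/experiments/schema"
#   if [[ "$(experiments_get "ab-test")" == "true" ]]; then
#     echo "ab-test enabled for this app"
#   fi
#   experiments_list   # prints one experiment name per line
#
# Because decisions are persisted in the cache, repeated builds of the same
# app keep the same answers until the schema's "#version" changes.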
# variables shared by this whole module
EXPERIMENTS_DATA_FILE=""
experiments_init() {
local name="$1"
local cache_dir="$2"
local schema="$3"
local last_schema_version schema_version random odds
EXPERIMENTS_DATA_FILE="$cache_dir/experiments/$name"
last_schema_version="$(kv_get "$EXPERIMENTS_DATA_FILE" "#version")"
schema_version="$(kv_get "$schema" "#version")"
# If the schema has changed, blow away the current values
# and start fresh. This is essentially "wiping the slate clean"
# and no previous experiments will be enabled for anyone
#
# In the case that the schema version is the same, we keep
# all of the previously decided experiments (file is the same)
# and decide on any new ones
if [[ "$last_schema_version" != "$schema_version" ]]; then
kv_create "$EXPERIMENTS_DATA_FILE"
kv_clear "$EXPERIMENTS_DATA_FILE"
# save out the version we're using to generate this set of experiments
kv_set "$EXPERIMENTS_DATA_FILE" "#version" "$schema_version"
fi
# iterate through the schema and decide if each new experiment
# should be turned on or not
kv_keys "$schema" | tr ' ' '\n' | while read -r key; do
# skip the special version key
if [[ "$key" = "#version" ]]; then
continue
# skip any values that are already decided
elif [[ -n "$(kv_get "$EXPERIMENTS_DATA_FILE" "$key")" ]]; then
continue
else
# generate a random number between 0 and 99
random=$((RANDOM % 100))
# the value in the schema should be a number between 0 and 100 inclusive
odds=$(kv_get "$schema" "$key")
if [[ "$random" -lt "$odds" ]]; then
kv_set "$EXPERIMENTS_DATA_FILE" "$key" "true"
else
kv_set "$EXPERIMENTS_DATA_FILE" "$key" "false"
fi
fi
done
}
# Determine whether an experiment is enabled or disabled
# Must call experiments_init first
#
# Possible outputs: "true" "false" ""
experiments_get() {
kv_get "$EXPERIMENTS_DATA_FILE" "$1"
}
# Outputs a list of experiment names, one-per-line
experiments_list() {
kv_keys "$EXPERIMENTS_DATA_FILE"
}
#!/usr/bin/env bash
warnings=$(mktemp -t heroku-buildpack-nodejs-XXXX)
detect_package_manager() {
......@@ -7,8 +9,16 @@ detect_package_manager() {
esac
}
fail() {
log_meta_data >> "$BUILDPACK_LOG_FILE"
exit 1
}
failure_message() {
local warn="$(cat $warnings)"
local warn
warn="$(cat "$warnings")"
echo ""
echo "We're sorry this build is failing! You can troubleshoot common issues here:"
echo "https://devcenter.heroku.com/articles/troubleshooting-node-deploys"
......@@ -28,16 +38,24 @@ failure_message() {
}
fail_invalid_package_json() {
if ! cat ${1:-}/package.json | $JQ "." 1>/dev/null; then
local is_invalid
is_invalid=$(is_invalid_json_file "${1:-}/package.json")
if "$is_invalid"; then
error "Unable to parse package.json"
mcount 'failures.parse.package-json'
return 1
meta_set "failure" "invalid-package-json"
header "Build failed"
failure_message
fail
fi
}
fail_dot_heroku() {
if [ -f "${1:-}/.heroku" ]; then
mcount "failures.dot-heroku"
meta_set "failure" "dot-heroku"
header "Build failed"
warn "The directory .heroku could not be created
......@@ -46,13 +64,14 @@ fail_dot_heroku() {
binaries like the node runtime and npm. You should remove the
.heroku file or ignore it by adding it to .slugignore
"
exit 1
fail
fi
}
fail_dot_heroku_node() {
if [ -f "${1:-}/.heroku/node" ]; then
mcount "failures.dot-heroku-node"
meta_set "failure" "dot-heroku-node"
header "Build failed"
warn "The directory .heroku/node could not be created
......@@ -61,7 +80,36 @@ fail_dot_heroku_node() {
binaries like the node runtime and npm. You should remove the
.heroku file or ignore it by adding it to .slugignore
"
exit 1
fail
fi
}
fail_iojs_unsupported() {
local build_dir="$1"
local iojs_engine
iojs_engine=$(read_json "$build_dir/package.json" ".engines.iojs")
if [ -n "$iojs_engine" ]; then
mcount "failures.iojs-unsupported"
meta_set "failure" "iojs-unsupported"
warn "io.js no longer supported
You are specifying an io.js version in your package.json:
\"engines\": {
...
\"iojs\": \"${iojs_engine}\"
}
io.js merged back into Node.js in 2015 and has been unsupported
for many years. It is likely to contain several large security
vulnerabilities that have since been patched in Node.js.
You can update your app to use the official Node.js release by
removing the version specification under \"engines\" in your
package.json.
"
fail
fi
}
......@@ -73,6 +121,7 @@ fail_multiple_lockfiles() {
if [ -f "${1:-}/yarn.lock" ] && [ -f "${1:-}/package-lock.json" ]; then
mcount "failures.two-lock-files"
meta_set "failure" "two-lock-files"
header "Build failed"
warn "Two different lockfiles found: package-lock.json and yarn.lock
......@@ -91,11 +140,12 @@ fail_multiple_lockfiles() {
$ git rm package-lock.json
" https://kb.heroku.com/why-is-my-node-js-build-failing-because-of-conflicting-lock-files
exit 1
fail
fi
if $has_modern_lockfile && [ -f "${1:-}/npm-shrinkwrap.json" ]; then
mcount "failures.shrinkwrap-lock-file-conflict"
meta_set "failure" "shrinkwrap-lock-file-conflict"
header "Build failed"
warn "Two different lockfiles found
......@@ -114,16 +164,18 @@ fail_multiple_lockfiles() {
- package-lock.json
- npm-shrinkwrap.json
" https://kb.heroku.com/why-is-my-node-js-build-failing-because-of-conflicting-lock-files
exit 1
fail
fi
}
fail_yarn_outdated() {
local yarn_engine
local log_file="$1"
local yarn_engine=$(read_json "$BUILD_DIR/package.json" ".engines.yarn")
if grep -qi 'error .install. has been replaced with .add. to add new dependencies' "$log_file"; then
yarn_engine=$(yarn --version)
mcount "failures.outdated-yarn"
meta_set "failure" "outdated-yarn"
echo ""
warn "Outdated Yarn version: $yarn_engine
......@@ -136,7 +188,7 @@ fail_yarn_outdated() {
\"yarn\": \"1.3.2\"
}
" https://devcenter.heroku.com/articles/nodejs-support#specifying-a-yarn-version
exit 1
fail
fi
}
......@@ -144,6 +196,7 @@ fail_yarn_lockfile_outdated() {
local log_file="$1"
if grep -qi 'Your lockfile needs to be updated' "$log_file"; then
mcount "failures.outdated-yarn-lockfile"
meta_set "failure" "outdated-yarn-lockfile"
echo ""
warn "Outdated Yarn lockfile
......@@ -159,16 +212,18 @@ fail_yarn_lockfile_outdated() {
$ git commit -m \"Updated Yarn lockfile\"
$ git push heroku master
" https://kb.heroku.com/why-is-my-node-js-build-failing-because-of-an-outdated-yarn-lockfile
exit 1
fail
fi
}
fail_bin_install() {
local error
local bin="$1"
local version="$2"
local platform="$3"
# re-curl the result, saving off the reason for the failure this time
local error=$(curl --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/$bin/$platform/latest.txt")
error=$(curl --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/$bin/$platform/latest.txt")
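# Illustrative only: for bin=node, version=10.x, platform=linux-x64 the
# request above resolves to roughly
#   https://nodebin.jxltom.com/v1/node/linux-x64/latest.txt?range=10.x
# and the response body is "No result" when no release satisfies the range.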
if [[ $error = "No result" ]]; then
case $bin in
......@@ -187,11 +242,14 @@ fail_bin_install() {
}
fail_node_install() {
local node_engine
local log_file="$1"
local node_engine=$(read_json "$BUILD_DIR/package.json" ".engines.node")
local build_dir="$2"
if grep -qi 'Could not find Node version corresponding to version requirement' "$log_file"; then
node_engine=$(read_json "$build_dir/package.json" ".engines.node")
mcount "failures.invalid-node-version"
meta_set "failure" "invalid-node-version"
echo ""
warn "No matching version found for Node: $node_engine
......@@ -213,16 +271,19 @@ fail_node_install() {
\"node\": \"6.11.1\"
}
" https://kb.heroku.com/why-is-my-node-js-build-failing-because-of-no-matching-node-versions
exit 1
fail
fi
}
fail_yarn_install() {
local yarn_engine
local log_file="$1"
local yarn_engine=$(read_json "$BUILD_DIR/package.json" ".engines.yarn")
local build_dir="$2"
if grep -qi 'Could not find Yarn version corresponding to version requirement' "$log_file"; then
yarn_engine=$(read_json "$build_dir/package.json" ".engines.yarn")
mcount "failures.invalid-yarn-version"
meta_set "failure" "invalid-yarn-version"
echo ""
warn "No matching version found for Yarn: $yarn_engine
......@@ -246,7 +307,7 @@ fail_yarn_install() {
\"yarn\": \"0.27.5\"
}
" https://kb.heroku.com/why-is-my-node-js-build-failing-because-of-no-matching-yarn-versions
exit 1
fail
fi
}
......@@ -254,6 +315,7 @@ fail_invalid_semver() {
local log_file="$1"
if grep -qi 'Error: Invalid semantic version' "$log_file"; then
mcount "failures.invalid-semver-requirement"
meta_set "invalid-semver-requirement"
echo ""
warn "Invalid semver requirement
......@@ -265,7 +327,7 @@ fail_invalid_semver() {
However you have specified a version requirement that is not a valid
semantic version.
" https://kb.heroku.com/why-is-my-node-js-build-failing-because-of-an-invalid-semver-requirement
exit 1
fail
fi
}
......@@ -273,26 +335,31 @@ log_other_failures() {
local log_file="$1"
if grep -qi "sh: 1: .*: not found" "$log_file"; then
mcount "failures.dev-dependency-tool-not-installed"
meta_set "failure" "dev-dependency-tool-not-installed"
return 0
fi
if grep -qi "Failed at the bcrypt@\d.\d.\d install script" "$log_file"; then
mcount "failures.bcrypt-permissions-issue"
meta_set "failure" "bcrypt-permissions-issue"
return 0
fi
if grep -qi "Versions of @angular/compiler-cli and typescript could not be determined" "$log_file"; then
mcount "failures.ng-cli-version-issue"
meta_set "failure" "ng-cli-version-issue"
return 0
fi
if grep -qi "Cannot read property '0' of undefined" "$log_file"; then
mcount "failures.npm-property-zero-issue"
meta_set "failure" "npm-property-zero-issue"
return 0
fi
if grep -qi "npm is known not to run on Node.js v\d.\d.\d" "$log_file"; then
mcount "failures.npm-known-bad-version"
meta_set "failure" "npm-known-bad-version"
return 0
fi
......@@ -300,76 +367,91 @@ log_other_failures() {
# "error Couldn't find any versions for" = yarn
if grep -q -e "notarget No matching version found for" -e "error Couldn't find any versions for" "$log_file"; then
mcount "failures.bad-version-for-dependency"
meta_set "failure" "bad-version-for-dependency"
return 0
fi
if grep -qi "You are likely using a version of node-tar or npm that is incompatible with this version of Node.js" "$log_file"; then
mcount "failures.node-9-npm-issue"
meta_set "failure" "node-9-npm-issue"
return 0
fi
if grep -qi "console.error(\`a bug known to break npm" "$log_file"; then
mcount "failures.old-node-new-npm"
meta_set "failure" "old-node-new-npm"
return 0
fi
if grep -qi "CALL_AND_RETRY_LAST Allocation failed" "$log_file"; then
mcount "failures.build-out-of-memory-error"
meta_set "failure" "build-out-of-memory-error"
return 0
fi
if grep -qi "enoent ENOENT: no such file or directory" "$log_file"; then
mcount "failures.npm-enoent"
meta_set "failure" "npm-enoent"
return 0
fi
if grep -qi "ERROR in [^ ]* from UglifyJs" "$log_file"; then
mcount "failures.uglifyjs"
meta_set "failure" "uglifyjs"
return 0
fi
# https://github.com/angular/angular-cli/issues/4551
if grep -qi "Module not found: Error: Can't resolve '\.\/\$\$_gendir\/app\/app\.module\.ngfactory'" "$log_file"; then
mcount "failures.ng-cli-issue-4551"
meta_set "failure" "ng-cli-issue-4551"
return 0
fi
if grep -qi "Host key verification failed" "$log_file"; then
mcount "failures.private-git-dependency-without-auth"
meta_set "failure" "private-git-dependency-without-auth"
return 0
fi
# same as the next test, but isolates bcrypt specifically
if grep -qi "Failed at the bcrypt@\d\.\d\.\d install" "$log_file"; then
mcount "failures.bcrypt-failed-to-build"
meta_set "failure" "bcrypt-failed-to-build"
return 0
fi
if grep -qi "Failed at the [^ ]* install script" "$log_file"; then
mcount "failures.dependency-failed-to-build"
meta_set "failure" "dependency-failed-to-build"
return 0
fi
if grep -qi "Line \d*: '.*' is not defined" "$log_file"; then
mcount "failures.undefined-variable-lint"
meta_set "failure" "undefined-variable-lint"
return 0
fi
if grep -qi "npm ERR! code EBADPLATFORM" "$log_file"; then
mcount "failures.npm-ebadplatform"
meta_set "failure" "npm-ebadplatform"
return 0
fi
if grep -qi "npm ERR! code EINVALIDPACKAGENAME" "$log_file"; then
mcount "failures.npm-package-name-typo"
meta_set "failure" "npm-package-name-typo"
return 0
fi
if grep -qi -e "npm ERR! code E404" -e "error An unexpected error occurred: .* Request failed \"404 Not Found\"" "$log_file"; then
mcount "failures.module-404"
meta_set "failure" "module-404"
if grep -qi "flatmap-stream" "$log_file"; then
mcount "flatmap-stream-404"
meta_set "failure" "flatmap-stream-404"
warn "The flatmap-stream module has been removed from the npm registry
On November 26th, npm was notified of a malicious package that had made its
......@@ -377,7 +459,7 @@ log_other_failures() {
npm responded by removing flatmap-stream and event-stream@3.3.6 from the Registry
and taking ownership of the event-stream package to prevent further abuse.
" https://kb.heroku.com/4OM7X18J/why-am-i-seeing-npm-404-errors-for-event-stream-flatmap-stream-in-my-build-logs
exit 1
fail
fi
return 0
......@@ -385,6 +467,7 @@ log_other_failures() {
if grep -qi "sh: 1: cd: can't cd to" "$log_file"; then
mcount "failures.cd-command-fail"
meta_set "failure" "cd-command-fail"
return 0
fi
......@@ -392,11 +475,13 @@ log_other_failures() {
if grep -qi "Module not found: Error: Can't resolve" "$log_file"; then
mcount "failures.webpack.module-not-found"
meta_set "failure" "webpack-module-not-found"
return 0
fi
if grep -qi "sass-loader/lib/loader.js:3:14" "$log_file"; then
mcount "failures.webpack.sass-loader-error"
meta_set "failure" "webpack-sass-loader-error"
return 0
fi
......@@ -404,21 +489,25 @@ log_other_failures() {
if grep -qi "Property '.*' does not exist on type '.*'" "$log_file"; then
mcount "failures.typescript.missing-property"
meta_set "failure" "typescript-missing-property"
return 0
fi
if grep -qi "Property '.*' is private and only accessible within class '.*'" "$log_file"; then
mcount "failures.typescript.private-property"
meta_set "failure" "typescript-private-property"
return 0
fi
if grep -qi "error TS2307: Cannot find module '.*'" "$log_file"; then
mcount "failures.typescript.missing-module"
meta_set "failure" "typescript-missing-module"
return 0
fi
if grep -qi "error TS2688: Cannot find type definition file for '.*'" "$log_file"; then
mcount "failures.typescript.missing-type-definition"
meta_set "failure" "typescript-missing-type-definition"
return 0
fi
......@@ -426,6 +515,7 @@ log_other_failures() {
# Ex: Error: Cannot find module 'chalk'
if grep -q "Error: Cannot find module '[^/C\.]" "$log_file"; then
mcount "failures.missing-module.npm"
meta_set "failure" "missing-module-npm"
return 0
fi
......@@ -433,6 +523,7 @@ log_other_failures() {
# Ex: Error: Cannot find module '/tmp/build_{hash}/...'
if grep -q "Error: Cannot find module '/" "$log_file"; then
mcount "failures.missing-module.local-absolute"
meta_set "failure" "missing-module-local-absolute"
return 0
fi
......@@ -440,6 +531,7 @@ log_other_failures() {
# Ex: Error: Cannot find module './lib/utils'
if grep -q "Error: Cannot find module '\." "$log_file"; then
mcount "failures.missing-module.local-relative"
meta_set "failure" "missing-module-local-relative"
return 0
fi
......@@ -447,16 +539,19 @@ log_other_failures() {
# Ex: Error: Cannot find module 'C:\Users...'
if grep -q "Error: Cannot find module 'C:" "$log_file"; then
mcount "failures.missing-module.local-windows"
meta_set "failure" "missing-module-local-windows"
return 0
fi
# matches the subsequent lines of a stacktrace
if grep -q 'at [^ ]* \([^ ]*:\d*\d*\)' "$log_file"; then
mcount "failures.unknown-stacktrace"
meta_set "failure" "unknown-stacktrace"
return 0
fi
# If we've made it this far, it's not an error we've added detection for yet
meta_set "failure" "unknown"
mcount "failures.unknown"
}
......@@ -467,7 +562,7 @@ warning() {
echo "- $tip"
echo " $url"
echo ""
} >> $warnings
} >> "$warnings"
}
warn() {
......@@ -486,7 +581,7 @@ warn_node_engine() {
elif [ "$node_engine" == "*" ]; then
warning "Dangerous semver range (*) in engines.node" "https://devcenter.heroku.com/articles/nodejs-support#specifying-a-node-js-version"
mcount 'warnings.node.star'
elif [ ${node_engine:0:1} == ">" ]; then
elif [ "${node_engine:0:1}" == ">" ]; then
warning "Dangerous semver range (>) in engines.node" "https://devcenter.heroku.com/articles/nodejs-support#specifying-a-node-js-version"
mcount 'warnings.node.greater'
fi
......@@ -497,6 +592,9 @@ warn_prebuilt_modules() {
if [ -e "$build_dir/node_modules" ]; then
warning "node_modules checked into source control" "https://blog.heroku.com/node-habits-2016#9-only-git-the-important-bits"
mcount 'warnings.modules.prebuilt'
meta_set "checked-in-node-modules" "true"
else
meta_set "checked-in-node-modules" "false"
fi
}
......@@ -509,17 +607,23 @@ warn_missing_package_json() {
}
warn_old_npm() {
local npm_version="$(npm --version)"
local npm_version latest_npm
npm_version="$(npm --version)"
if [ "${npm_version:0:1}" -lt "2" ]; then
local latest_npm="$(curl --silent --get --retry 5 --retry-max-time 15 https://semver.herokuapp.com/npm/stable)"
latest_npm="$(curl --silent --get --retry 5 --retry-max-time 15 https://semver.herokuapp.com/npm/stable)"
warning "This version of npm ($npm_version) has several known issues - consider upgrading to the latest release ($latest_npm)" "https://devcenter.heroku.com/articles/nodejs-support#specifying-an-npm-version"
mcount 'warnings.npm.old'
fi
}
warn_old_npm_lockfile() {
local npm_version
local npm_lock=$1
local npm_version="$(npm --version)"
npm_version="$(npm --version)"
if $npm_lock && [ "${npm_version:0:1}" -lt "5" ]; then
warn "This version of npm ($npm_version) does not support package-lock.json. Please
update your npm version in package.json." "https://devcenter.heroku.com/articles/nodejs-support#specifying-an-npm-version"
......@@ -552,13 +656,16 @@ warn_angular_resolution() {
}
warn_missing_devdeps() {
local dev_deps
local log_file="$1"
local build_dir="$2"
if grep -qi 'cannot find module' "$log_file"; then
warning "A module may be missing from 'dependencies' in package.json" "https://devcenter.heroku.com/articles/troubleshooting-node-deploys#ensure-you-aren-t-relying-on-untracked-dependencies"
mcount 'warnings.modules.missing'
if [ "$NPM_CONFIG_PRODUCTION" == "true" ]; then
local devDeps=$(read_json "$BUILD_DIR/package.json" ".devDependencies")
if [ "$devDeps" != "" ]; then
dev_deps=$(read_json "$build_dir/package.json" ".devDependencies")
if [ "$dev_deps" != "" ]; then
warning "This module may be specified in 'devDependencies' instead of 'dependencies'" "https://devcenter.heroku.com/articles/nodejs-support#devdependencies"
mcount 'warnings.modules.devdeps'
fi
......@@ -567,11 +674,13 @@ warn_missing_devdeps() {
}
warn_no_start() {
local log_file="$1"
if ! [ -e "$BUILD_DIR/Procfile" ]; then
local startScript=$(read_json "$BUILD_DIR/package.json" ".scripts.start")
if [ "$startScript" == "" ]; then
if ! [ -e "$BUILD_DIR/server.js" ]; then
local start_script
local build_dir="$1"
if ! [ -e "$build_dir/Procfile" ]; then
start_script=$(read_json "$build_dir/package.json" ".scripts.start")
if [ "$start_script" == "" ]; then
if ! [ -e "$build_dir/server.js" ]; then
warn "This app may not specify any way to start a node process" "https://devcenter.heroku.com/articles/nodejs-support#default-web-process-type"
mcount 'warnings.unstartable'
fi
......@@ -588,8 +697,11 @@ warn_econnreset() {
}
warn_unmet_dep() {
local package_manager
local log_file="$1"
local package_manager=$(detect_package_manager)
package_manager=$(detect_package_manager)
if grep -qi 'unmet dependency' "$log_file" || grep -qi 'unmet peer dependency' "$log_file"; then
warn "Unmet dependencies don't fail $package_manager install but may cause runtime issues" "https://github.com/npm/npm/issues/7494"
mcount 'warnings.modules.unmet'
......
#!/usr/bin/env bash
JQ="$BP_DIR/vendor/jq-$(get_os)"
read_json() {
local file=$1
local key=$2
if test -f $file; then
cat $file | $JQ --raw-output "$key // \"\"" || return 1
local file="$1"
local key="$2"
if test -f "$file"; then
# shellcheck disable=SC2002
cat "$file" | $JQ --raw-output "$key // \"\"" || return 1
else
echo ""
fi
}
has_script() {
local file="$1"
local key="$2"
if test -f "$file"; then
# shellcheck disable=SC2002
cat "$file" | $JQ ".[\"scripts\"] | has(\"$key\")"
else
echo "false"
fi
}
is_invalid_json_file() {
local file="$1"
# shellcheck disable=SC2002
if ! cat "$file" | $JQ "." 1>/dev/null; then
echo "true"
else
echo "false"
fi
}
\ No newline at end of file
#!/usr/bin/env bash
kv_create() {
local f=$1
mkdir -p $(dirname $f)
touch $f
mkdir -p "$(dirname "$f")"
touch "$f"
}
kv_clear() {
local f=$1
echo "" > $f
echo "" > "$f"
}
kv_set() {
if [[ $# -eq 3 ]]; then
local f=$1
if [[ -f $f ]]; then
echo "$2=$3" >> $f
echo "$2=$3" >> "$f"
fi
fi
}
# get the value, but don't unwrap quotes
kv_get() {
if [[ $# -eq 2 ]]; then
local f=$1
if [[ -f $f ]]; then
grep "^$2=" $f | sed -e "s/^$2=//" | tail -n 1
grep "^$2=" "$f" | sed -e "s/^$2=//" | tail -n 1
fi
fi
}
# get the value, but wrap it in quotes if it contains a space
kv_get_escaped() {
local value=$(kv_get $1 $2 $3)
local value
value=$(kv_get "$1" "$2")
if [[ $value =~ [[:space:]]+ ]]; then
echo "\"$value\""
else
echo $value
echo "$value"
fi
}
......@@ -43,10 +45,18 @@ kv_keys() {
local keys=()
if [[ -f $f ]]; then
# get list of keys
while IFS="=" read -r key value; do
keys+=("$key")
done < $f
# Iterate over each line, splitting on the '=' character
#
# The || [[ -n "$key" ]] statement addresses an issue with reading the last line
# of a file when there is no newline at the end. This will not happen if the file
# is created with this module, but can happen if it is written by hand.
# See: https://stackoverflow.com/questions/12916352/shell-script-read-missing-last-line
while IFS="=" read -r key value || [[ -n "$key" ]]; do
# if there are any empty lines in the store, skip them
if [[ -n $key ]]; then
keys+=("$key")
fi
done < "$f"
echo "${keys[@]}" | tr ' ' '\n' | sort -u
fi
......@@ -55,9 +65,9 @@ kv_keys() {
kv_list() {
local f=$1
kv_keys $f | tr ' ' '\n' | while read -r key; do
kv_keys "$f" | tr ' ' '\n' | while read -r key; do
if [[ -n $key ]]; then
echo "$key=$(kv_get_escaped $f $key)"
echo "$key=$(kv_get_escaped "$f" "$key")"
fi
done
}
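# A minimal usage sketch of this key-value store (illustrative only;
# "$store" is a hypothetical temp file):
#
#   store=$(mktemp)
#   kv_create "$store"
#   kv_set "$store" "node-version" "10.15.3"
#   kv_set "$store" "node-version" "10.16.0"   # appended; reads return the last write
#   kv_get "$store" "node-version"             # => 10.16.0
#   kv_keys "$store"                           # => sorted, de-duplicated key names
#   kv_list "$store"                           # => key=value lines, quoting values with spaces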
#!/usr/bin/env bash
# variables shared by this whole module
BUILD_DATA_FILE=""
PREVIOUS_BUILD_DATA_FILE=""
meta_create() {
local cache_dir="$1"
BUILD_DATA_FILE="$cache_dir/build-data/nodejs"
PREVIOUS_BUILD_DATA_FILE="$cache_dir/build-data/nodejs-prev"
# if the file already exists because it's from the last build, save it
if [[ -f "$BUILD_DATA_FILE" ]]; then
cp "$BUILD_DATA_FILE" "$PREVIOUS_BUILD_DATA_FILE"
fi
kv_create "$BUILD_DATA_FILE"
# make sure this doesn't grow over time
kv_clear "$BUILD_DATA_FILE"
}
meta_get() {
kv_get "$BUILD_DATA_FILE" "$1"
}
meta_set() {
kv_set "$BUILD_DATA_FILE" "$1" "$2"
}
# similar to mtime from stdlib
meta_time() {
local key="$1"
local start="$2"
local end="${3:-$(nowms)}"
local time
time="$(echo "${start}" "${end}" | awk '{ printf "%.3f", ($2 - $1)/1000 }')"
kv_set "$BUILD_DATA_FILE" "$key" "$time"
}
# similar to mtime from stdlib
meta_time() {
local key="$1"
local start="$2"
local end="${3:-$(nowms)}"
local time
time="$(echo "$start" "$end" | awk '{ printf "%.3f", ($2 - $1)/1000 }')"
kv_set "$BUILD_DATA_FILE" "$1" "$time"
}
# Retrieve a value from a previous build if it exists
# This is useful to give the user context about what changed if the
# build has failed. Ex:
# - changed stacks
# - deployed with a new major version of Node
# - etc
meta_prev_get() {
kv_get "$PREVIOUS_BUILD_DATA_FILE" "$1"
}
log_meta_data() {
# print all values on one line in logfmt format
# https://brandur.org/logfmt
# the echo call ensures that all values are printed on a single line
# shellcheck disable=SC2005 disable=SC2046
echo $(kv_list "$BUILD_DATA_FILE")
}
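# A minimal usage sketch (illustrative values only):
#
#   meta_create "$CACHE_DIR"
#   meta_set "node-version-request" "10.x"
#   meta_time "build-time" "1545178120033" "1545178130043"   # stores "10.010" seconds
#   log_meta_data   # => build-time=10.010 node-version-request=10.x (single logfmt line)
#
# On the next cached build, meta_prev_get "node-version-request" would
# return "10.x" while meta_get starts from a freshly cleared file.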
#!/usr/bin/env bash
monitor_memory_usage() {
local output_file="$1"
......@@ -12,7 +13,7 @@ monitor_memory_usage() {
pid=$!
# if this build process is SIGTERM'd
trap "kill -TERM $pid" TERM
trap 'kill -TERM $pid' TERM
# set the peak memory usage to 0 to start
peak="0"
......@@ -29,7 +30,7 @@ monitor_memory_usage() {
done
# ps gives us KB, let's convert to MB for convenience
echo "$(($peak / 1024))" > $output_file
echo "$((peak / 1024))" > "$output_file"
# After wait returns we can get the exit code of $command
wait $pid
......@@ -43,16 +44,21 @@ monitor_memory_usage() {
}
monitor() {
local peak_mem_output start
local command_name=$1
shift
local command=( "$@" )
local peak_mem_output=$(mktemp)
local start=$(nowms)
peak_mem_output=$(mktemp)
start=$(nowms)
# execute the subcommand and save the peak memory usage
monitor_memory_usage $peak_mem_output "${command[@]}"
monitor_memory_usage "$peak_mem_output" "${command[@]}"
mtime "exec.$command_name.time" "${start}"
mmeasure "exec.$command_name.memory" "$(cat $peak_mem_output)"
mmeasure "exec.$command_name.memory" "$(cat "$peak_mem_output")"
meta_time "$command_name-time" "$start"
meta_set "$command_name-memory" "$(cat "$peak_mem_output")"
}
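# A minimal usage sketch (illustrative only): wrapping a command records its
# wall-clock time and peak memory in both the metrics and metadata stores:
#
#   monitor "npm-install" npm install --production=false
#   # emits  exec.npm-install.time / exec.npm-install.memory  via mtime/mmeasure
#   # and    npm-install-time / npm-install-memory             via meta_time/meta_set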
#!/usr/bin/env bash
# TODO: Merge these with the output helpers in buildpack-stdlib:
# https://github.com/heroku/buildpack-stdlib
......@@ -26,6 +28,15 @@ header() {
echo "-----> $*" || true
}
bright_header() {
echo "" || true
echo -e "\033[1;33m-----> $* \033[0m"
}
header_skip_newline() {
echo "-----> $*" || true
}
error() {
echo " ! $*" >&2 || true
echo "" || true
......
#!/usr/bin/env bash
get_node_major_version() {
local node_version="$(node --version)"
local node_version
node_version="$(node --version)"
# major_string will be ex: "6." "8." "10"
local major_string=${node_version:1:2}
# strip any "."s from major_string
local major=${major_string//.}
echo $major
echo "$major"
}
install_plugin() {
local major
local bp_dir="$1"
local build_dir="$2"
local major=$(get_node_major_version)
major=$(get_node_major_version)
local plugin="${bp_dir}/plugin/heroku-nodejs-plugin-node-${major}.tar.gz"
# If we have a version of the plugin compiled for this version of node, and the
......@@ -19,6 +23,6 @@ install_plugin() {
# It will be included at runtime once the user opts into the Node metrics feature
if [[ -f "${plugin}" ]] && [[ -z "$HEROKU_SKIP_NODE_PLUGIN" ]]; then
mkdir -p "${build_dir}/.heroku/"
tar -xzf ${plugin} -C "${build_dir}/.heroku/"
tar -xzf "${plugin}" -C "${build_dir}/.heroku/"
fi
}
#!/usr/bin/env bash
uuid_fallback()
{
local N B C='89ab'
for (( N=0; N < 16; ++N ))
do
B=$(( RANDOM%256 ))
case $N in
6)
printf '4%x' $(( B%16 ))
;;
8)
printf '%c%x' ${C:$RANDOM%${#C}:1} $(( B%16 ))
;;
3 | 5 | 7 | 9)
printf '%02x-' $B
;;
*)
printf '%02x' $B
;;
esac
done
echo
}
uuid() {
# On Heroku's stacks, the kernel exposes a random uuid file
if [[ -f /proc/sys/kernel/random/uuid ]]; then
cat /proc/sys/kernel/random/uuid
# on macOS there is also a uuidgen command
elif [[ -x "$(command -v uuidgen)" ]]; then
uuidgen | tr "[:upper:]" "[:lower:]"
# If you are running this buildpack on an image without either of the above sources
# then let's provide something that approximates this functionality, but beware that
# we can make no guarantees of true randomness or uniqueness of this ID. However it is
# likely only being piped to /dev/null
#
# If that's not true for you, please file an issue and let us know:
# https://github.com/heroku/heroku-buildpack-nodejs/issues
else
uuid_fallback
fi
}
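# Illustrative only: all three paths above produce a lowercase UUID in the
# usual 8-4-4-4-12 form, e.g.
#   uuid   # => 1b4e28ba-2fa1-4d3b-a3f5-9c6e2f0c9d21  (example value)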
test: heroku-18 heroku-16 cedar-14
build:
@GOOS=darwin GOARCH=amd64 go build -ldflags="-s -w" -v -o ./vendor/resolve-version-darwin ./cmd/resolve-version
@GOOS=linux GOARCH=amd64 go build -ldflags="-s -w" -v -o ./vendor/resolve-version-linux ./cmd/resolve-version
build-production:
# build go binaries and then compress them
@GOOS=darwin GOARCH=amd64 go build -ldflags="-s -w" -v -o ./vendor/resolve-version-darwin ./cmd/resolve-version
@GOOS=linux GOARCH=amd64 go build -ldflags="-s -w" -v -o ./vendor/resolve-version-linux ./cmd/resolve-version
# https://blog.filippo.io/shrink-your-go-binaries-with-this-one-weird-trick/
upx --brute vendor/resolve-version-linux
upx --brute vendor/resolve-version-darwin
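# Typical local usage (a sketch; assumes go and upx are installed):
#   make build              # uncompressed darwin + linux binaries in ./vendor
#   make build-production   # same binaries, then compressed with upx --brute
#   make test-binary        # run the go integration tests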
test-binary:
go test -v ./cmd/... -tags=integration
shellcheck:
@shellcheck -x bin/compile bin/detect bin/release bin/test bin/test-compile
@shellcheck -x lib/**
@shellcheck -x ci-profile/**
@shellcheck -x etc/**
heroku-18:
@echo "Running tests in docker (heroku-18)..."
@docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-18" heroku/heroku:18 bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;'
......
......@@ -8,7 +8,7 @@ TAG_NAME=${1:-}
PLUGIN_DIR=$(dirname $0)
handle_failure() {
echo "Failure running script."
echo "Failure running script on line $1."
echo "This may be rate-limiting from Github if you've run this script a few times. Here is the rate limit response:"
......@@ -34,6 +34,8 @@ download() {
delete_old_plugin() {
local dir=${1}
rm -f "$dir/heroku-nodejs-plugin-node-12.sha512"
rm -f "$dir/heroku-nodejs-plugin-node-12.tar.gz"
rm -f "$dir/heroku-nodejs-plugin-node-11.sha512"
rm -f "$dir/heroku-nodejs-plugin-node-11.tar.gz"
rm -f "$dir/heroku-nodejs-plugin-node-10.sha512"
......@@ -64,6 +66,10 @@ download_assets_for_release() {
# Node 11
download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-11-$tag.sha512" "$dir/heroku-nodejs-plugin-node-11.sha512"
download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-11-$tag.tar.gz" "$dir/heroku-nodejs-plugin-node-11.tar.gz"
# Node 12
download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-12-$tag.sha512" "$dir/heroku-nodejs-plugin-node-12.sha512"
download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-12-$tag.tar.gz" "$dir/heroku-nodejs-plugin-node-12.tar.gz"
}
test_hash() {
......@@ -81,7 +87,7 @@ test_hash() {
fi
}
trap 'handle_failure' ERR
trap 'handle_failure ${LINENO}' ERR
if [[ -z $TAG_NAME ]]; then
TAG_NAME=$(get_latest_release)
......@@ -100,5 +106,6 @@ test_hash 8 $PLUGIN_DIR
test_hash 9 $PLUGIN_DIR
test_hash 10 $PLUGIN_DIR
test_hash 11 $PLUGIN_DIR
test_hash 12 $PLUGIN_DIR
echo "Done"
10e91fb8f741a42226fe91daf41a30032f6950f560622ce4523766d93dfd9d01dc88b3d5bfb26968d69d5f8fb6c61f0b35db310f61c366ae0c8d48c9181ee02c heroku-nodejs-plugin-node-10-v4.tar.gz
d6ad0f45d5564f324147f590ce9ac39c5973a64dfb1905eb993dfcf3dce21b6934a7821ffbd853ff30e89952ba17e7667809152e187453d7c18579d945a5f8bd heroku-nodejs-plugin-node-10-v5.tar.gz
6d0732e32f6cb2a818c415c5fc454912113da273c3b158124c630db473aa0b157fc81c41dd90de887a804e3c2c2eae0b9897d0da329b7a6e52b9cf27db4e5d0a heroku-nodejs-plugin-node-11-v4.tar.gz
0afc36d4268b7ce3dd1c59813d7727a1bae715645bc0fb006ca992ccd028df692e31d2a4df697ab08d6b4baad03cd6ebef8525e481e0c5cf12e810c30e1da0cb heroku-nodejs-plugin-node-11-v5.tar.gz
0020b60fd3aebcc1fb13770e3445a93c0633579853ee0d31d3dc12d02e4a8a1d418a4d4add2145da46f9d2820f6ae1e412a0beb4eb3e46d7fc8326d2e2d8248d heroku-nodejs-plugin-node-12-v5.tar.gz
82a7f67bf61490d40aa69455a6a1fd7459f9fc219dfe9b0ac65cf7345e8a1c10070ce96ef42eecc5eb82fb5d7ea74b69e1a060c44f0a438eb02e81fd109c2ea4 heroku-nodejs-plugin-node-8-v4.tar.gz
d96566824853bc7657fbf2f651067ed0b1747e4d6b4b9b443df6f2d490742b4e571dff5165e68c6d8af44501af0bdddd98e71eeb5fcc9816348453a1f8a314d9 heroku-nodejs-plugin-node-8-v5.tar.gz
d7bca7b45d0852e082103041b7a5d1331470074223314273275c4cd8d09ef6174c14b270ede7f501e6280e935814535783a8d4050d9e21a8918b1ab81f444870 heroku-nodejs-plugin-node-9-v4.tar.gz
eefbf22a508e0fd4dea303a7d247a4c6ebb60803c5221e43dd6a9921332ad32791f6b4e95a3379c519290a6767a1dc797f3e68ed583427a53695f47c7b80ccdd heroku-nodejs-plugin-node-9-v5.tar.gz
require_relative '../spec_helper'
describe "Node Metrics for v12.x" do
context "test metrics for Node v12.x app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-12-metrics",
config: {
"HEROKU_METRICS_URL" => "http://localhost:3000",
"METRICS_INTERVAL_OVERRIDE" => "10000"
}
)
}
it "should deploy" do
app.deploy do |app|
data = successful_json_body(app)
expect(data["gauges"]["node.eventloop.delay.ms.max"]).to be >= 2000
expect(data["counters"]["node.gc.collections"]).to be >= 0
expect(data["counters"]["node.gc.young.collections"]).to be >= 0
expect(data["counters"]["node.gc.old.collections"]).to be >= 0
end
end
end
end
require_relative '../spec_helper'
describe "Hello World for Node v12.x" do
context "a single-process Node v12.x app" do
let(:app) {
Hatchet::Runner.new("spec/fixtures/repos/node-12")
}
it "should deploy successfully" do
app.deploy do |app|
expect(successful_body(app).strip).to eq("Hello, world!")
end
end
end
end
{
"name": "node-metrics-test-app",
"version": "1.0.0",
"engines": {
"node": "12.x"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {},
"dependencies": {}
}
#!/usr/bin/env node
const http = require('http');
const EventEmitter = require('events');
const PORT = process.env.PORT || 5000;
const Events = new EventEmitter();
// This will block the event loop for roughly the given number of milliseconds
function blockCpuFor(ms) {
return new Promise((resolve, reject) => {
setTimeout(() => {
console.log(`blocking the event loop for ${ms}ms`);
let now = new Date().getTime();
let result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now + ms)
break;
}
resolve();
}, 100);
});
}
function getNextMetricsEvent() {
return new Promise((resolve, reject) => Events.once('metrics', resolve));
}
const server = http.createServer((req, res) => {
// wait for the next metrics event
getNextMetricsEvent()
.then(blockCpuFor(2000))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
.then(blockCpuFor(100))
// gather the next metrics data which should include these pauses
.then(getNextMetricsEvent())
.then(data => {
res.setHeader('Content-Type', 'application/json');
res.end(data);
})
.catch(() => {
res.statusCode = 500;
res.end("Something went wrong");
});
});
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
// Create a second server that intercepts the HTTP requests
// sent by the metrics plugin
const metricsListener = http.createServer((req, res) => {
if (req.method == 'POST') {
let body = '';
req.on('data', (data) => body += data);
req.on('end', () => Events.emit('metrics', body));
res.statusCode = 200;
res.end();
}
});
metricsListener.listen(3000, () => console.log('Listening for metrics on 3000'));
{
"name": "hello-world",
"version": "1.0.0",
"engines": {
"node": "12.x"
},
"scripts": {
"prettify": "prettier --single-quote --trailing-comma all --write 'bin/*' 'src/**/*.js'",
"test": "jest --silent",
"dev": "nodemon --watch . --watch src/* src/index.js",
"heroku-postbuild": "echo NODE_OPTIONS: $NODE_OPTIONS"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {
"jest": "^19.0.2",
"nodemon": "^1.11.0",
"prettier": "^0.22.0"
},
"dependencies": {}
}
#!/usr/bin/env node
const http = require('http');
const crypto = require('crypto');
const PORT = process.env.PORT || 5000;
// This will block the event loop for roughly the given number of milliseconds
function blockCpuFor(ms) {
var now = new Date().getTime();
var result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now + ms)
return;
}
}
// block the event loop for 100ms every second
setInterval(() => {
blockCpuFor(100);
}, 1000)
// block the event loop for 1sec every 30 seconds
setInterval(() => {
blockCpuFor(1000);
}, 30000)
// Allocate and erase memory on an interval
let store = [];
setInterval(() => {
store.push(crypto.randomBytes(1000000).toString('hex'));
}, 500);
setInterval(() => {
store = [];
}, 60000);
const server = http.createServer((req, res) => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end("Hello, world!");
})
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
......@@ -7,7 +7,7 @@
"url": "http://github.com/example/example.git"
},
"engines": {
"node": "~0.10.0"
"node": "10.x"
},
"scripts" : {
"build": "echo build hook message",
......
......@@ -6,11 +6,8 @@
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"engines": {
"node": "10.x"
},
"scripts" : {
"build" : "echo build hook message"
},
"heroku-run-build-script": true
"license": "MIT"
}
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
......@@ -7,7 +7,7 @@
"url" : "http://github.com/example/example.git"
},
"engines": {
"node": "~0.10.0"
"node": "10.x"
},
"scripts" : {
"build" : "echo build hook message"
......
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"scripts" : {
"build" : "echo build hook message",
"heroku-postbuild": ""
}
}
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"engines": {
"node": "10.x"
},
"scripts" : {
"build" : "echo build hook message",
"heroku-postbuild": ""
}
}
......@@ -10,9 +10,9 @@
"hashish": "*"
},
"engines": {
"node": "0.10.38"
"node": "8.x"
},
"scripts": {
"postinstall": "exit 1"
"heroku-postbuild": "exit 1"
}
}
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository": {
"type": "git",
"url": "http://github.com/example/example.git"
},
"engines": {
"node": "10.x"
},
"scripts": {
"build": "echo build hook message",
"heroku-postbuild": "",
"random-script-name": ""
}
}
......@@ -7,7 +7,7 @@
"url" : "http://github.com/example/example.git"
},
"engines": {
"node": "~0.10.0"
"node": "10.x"
},
"scripts" : {
"heroku-prebuild" : "echo heroku-prebuild hook message",
......
......@@ -16,27 +16,38 @@ testFlatmapStream() {
}
testBuildScriptBehavior() {
# opt in to new build script behavior
cache=$(mktmpdir)
env_dir=$(mktmpdir)
echo "true" > $env_dir/NEW_BUILD_SCRIPT_BEHAVIOR
# The 'build' script is run by default
compile "build-script" $cache $env_dir
compile "build-script"
assertCaptured "Running build"
assertCapturedSuccess
# the 'heroku-postbuild' script takes precedence over the 'build' script
compile "build-script-override" $cache $env_dir
assertCaptured "Detected both 'build' and 'heroku-postbuild' scripts"
compile "build-script-override"
assertCaptured "Detected both \"build\" and \"heroku-postbuild\" scripts"
assertCaptured "Running heroku-postbuild"
assertCapturedSuccess
}
testBuildScriptOptIn() {
compile "build-script-opt-in"
assertCaptured "Running build"
assertCaptured "Opting in to new default build script behavior"
testBuildScriptYarn() {
compile "build-script-yarn"
assertCaptured "Running build (yarn)"
assertCaptured "build hook message"
assertCapturedSuccess
}
testPreferEmptyHerokuPostbuildOverBuild() {
compile "empty-heroku-postbuild"
assertCaptured "Detected both \"build\" and \"heroku-postbuild\" scripts"
assertCaptured "Running heroku-postbuild"
assertNotCaptured "build hook message"
assertCapturedSuccess
}
testEmptyHerokuPostbuildWithYarn() {
compile "empty-heroku-postbuild-yarn"
assertCaptured "Running heroku-postbuild (yarn)"
assertNotCaptured "build hook message"
assertNotCaptured "Script must exist"
assertCapturedSuccess
}
......@@ -479,13 +490,6 @@ testInvalidNodeSemver() {
assertCapturedError
}
testInvalidIo() {
compile "invalid-io"
assertCaptured "Resolving iojs version 2.0.99"
assertCaptured "Could not find Iojs version corresponding to version requirement: 2.0.99"
assertCapturedError
}
testSignatureInvalidation() {
cache=$(mktmpdir)
env_dir=$(mktmpdir)
......@@ -527,10 +531,8 @@ testDetectWithoutPackageJson() {
testIoJs() {
compile "iojs"
assertCaptured "engines.iojs (package.json): 1.0."
assertCaptured "Downloading and installing iojs 1.0."
assertNotCaptured "Downloading and installing npm"
assertCapturedSuccess
assertCaptured "io.js no longer supported"
assertCapturedError
}
testSpecificVersion() {
......@@ -562,11 +564,6 @@ testOldNpm() {
assertCapturedError
}
testOldNpm2() {
compile "failing-build"
assertCaptured "This version of npm (1.4.28) has several known issues"
}
testNonexistentNpm() {
compile "nonexistent-npm"
assertCaptured "Unable to install npm 1.1.65"
......@@ -594,7 +591,6 @@ testNpmVersionSpecific() {
testFailingBuild() {
compile "failing-build"
assertCaptured "Building dependencies"
assertCaptured "Build failed"
assertCaptured "We're sorry this build is failing"
assertNotCaptured "Checking startup method"
......@@ -874,6 +870,12 @@ testCIEnvVars() {
assertCapturedSuccess
}
# If compile fails, test-compile should also fail
testCICompileFails() {
testCompile "failing-build"
assertCapturedError
}
testCIEnvVarsOverride() {
env_dir=$(mktmpdir)
echo "banana" > $env_dir/NODE_ENV
......@@ -1031,6 +1033,96 @@ testMemoryMetrics() {
assertFileNotContains "measure#buildpack.nodejs.exec.heroku-postbuild.memory=" $metrics_log
}
testBuildMetaData() {
local log_file=$(mktemp)
BUILDPACK_LOG_FILE="$log_file" compile "pre-post-build-scripts"
# build info
assertFileContains "node-package-manager=npm" $log_file
assertFileContains "checked-in-node-modules=false" $log_file
assertFileContains "has-node-lock-file=false" $log_file
assertFileContains "cache-status=not-found" $log_file
assertFileContains "node-build-success=true" $log_file
assertFileContains "build-time=" $log_file
assertFileContains "app-uuid=" $log_file
assertFileContains "build-uuid=" $log_file
# binary versions
assertFileContains "node-version-request=10.x" $log_file
assertFileContains "npm-version-request= " $log_file
# log build scripts
assertFileContains "heroku-prebuild-script=\"echo heroku-prebuild hook message\"" $log_file
assertFileContains "heroku-postbuild-script=\"echo heroku-postbuild hook message\"" $log_file
assertFileContains "build-script= " $log_file
# monitor calls
assertFileContains "install-node-binary-memory=" $log_file
assertFileContains "install-node-binary-time=" $log_file
assertFileContains "install-npm-binary-time=" $log_file
assertFileContains "install-npm-binary-memory=" $log_file
assertFileContains "heroku-prebuild-time=" $log_file
assertFileContains "heroku-prebuild-memory=" $log_file
assertFileContains "npm-install-time=" $log_file
assertFileContains "npm-install-memory=" $log_file
assertFileContains "heroku-postbuild-time=" $log_file
assertFileContains "heroku-postbuild-memory=" $log_file
assertFileContains "npm-prune-memory=" $log_file
assertFileContains "npm-prune-time=" $log_file
# erase the log file
echo "" > $log_file
BUILDPACK_LOG_FILE="$log_file" compile "yarn"
assertFileContains "node-package-manager=yarn" $log_file
assertFileContains "has-node-lock-file=true" $log_file
assertFileContains "yarn-version-request=1.x" $log_file
assertFileContains "yarn-version=1." $log_file
assertFileContains "install-yarn-binary-memory=" $log_file
assertFileContains "install-yarn-binary-time=" $log_file
assertFileContains "node-build-success=true" $log_file
# log resolve logic dark-launch
assertFileContains "resolve-matches-nodebin-yarn=true" $log_file
assertFileContains "resolve-matches-nodebin-node=true" $log_file
}
testFailingBuildMetaData() {
local log_file=$(mktemp)
BUILDPACK_LOG_FILE="$log_file" compile "bad-json"
assertFileContains "failure=invalid-package-json" $log_file
assertCapturedError
echo "" > $log_file
BUILDPACK_LOG_FILE="$log_file" compile "yarn-lockfile-out-of-date"
assertFileContains "failure=outdated-yarn-lockfile" $log_file
assertCapturedError
}
testPropagateAppUUID() {
env_dir=$(mktmpdir)
local log_file=$(mktemp)
local cache_dir=${2:-$(mktmpdir)}
echo "$log_file" > $env_dir/BUILDPACK_LOG_FILE
# save the generated app-uuid for the first build
compile "node-10" $cache_dir $env_dir
assertFileContains "app-uuid=" $log_file
local uuid=$(cat $log_file | sed -n -e 's/^.*app-uuid=\([^ ]*\).*/\1/p')
# create a new log file
log_file=$(mktemp)
echo "$log_file" > $env_dir/BUILDPACK_LOG_FILE
# recompile with the same cache directory
compile "node-10" $cache_dir $env_dir
assertFileContains "app-uuid" $log_file
# make sure that the app-uuid is the same
assertEquals "$uuid" "$(cat $log_file | sed -n -e 's/^.*app-uuid=\([^ ]*\).*/\1/p')"
}
testBinDetectWarnings() {
detect "slugignore-package-json"
assertCapturedError "'package.json' listed in '.slugignore' file"
......@@ -1093,6 +1185,15 @@ compile() {
capture ${bp_dir}/bin/compile ${compile_dir} ${2:-$(mktmpdir)} $3
}
testCompile() {
default_process_types_cleanup
bp_dir=$(mktmpdir)
compile_dir=$(mktmpdir)
cp -a "$(pwd)"/* ${bp_dir}
cp -a ${bp_dir}/test/fixtures/$1/. ${compile_dir}
capture ${bp_dir}/bin/test-compile ${compile_dir} ${2:-$(mktmpdir)} $3
}
# This is meant to be run after `compile`. `cleanupStartup` must be run
# after this function is called before other tests are executed
executeStartup() {
......
......@@ -108,6 +108,26 @@ testKeyValue() {
assertEquals "" "$(kv_list $store)"
}
testKeyValueNoNewLine() {
local store
# use a fixture that does not have an empty line after the final entry
store="$(pwd)/test/unit-fixtures/kvstore/no-new-line"
assertEquals "$(printf "%s\n" a=b b=c)" "$(kv_list $store)"
assertEquals "$(printf "%s\n" a b)" "$(kv_keys $store)"
}
testKeyValueEmptyLine() {
local store
# use a fixture that has an extra empty line
store="$(pwd)/test/unit-fixtures/kvstore/empty-line"
assertEquals "$(printf "%s\n" a=b b=c)" "$(kv_list $store)"
assertEquals "$(printf "%s\n" a b)" "$(kv_keys $store)"
}
testKeyValueEscaping() {
local store=$(mktemp)
......@@ -151,21 +171,71 @@ testKeyValueNoFile() {
testBuildData() {
local cache_dir=$(mktemp -d)
bd_create $cache_dir
meta_create $cache_dir
bd_set "test" "foo"
assertEquals "test=foo" "$(log_build_data)"
meta_set "test" "foo"
assertEquals "test=foo" "$(log_meta_data)"
bd_set "test" "different-foo"
assertEquals "test=different-foo" "$(log_build_data)"
meta_set "test" "different-foo"
assertEquals "test=different-foo" "$(log_meta_data)"
bd_set "foo" "value with spaces"
assertEquals "foo=\"value with spaces\" test=different-foo" "$(log_build_data)"
meta_set "foo" "value with spaces"
assertEquals "foo=\"value with spaces\" test=different-foo" "$(log_meta_data)"
# values are printed with the keys sorted alphabetically
# this isn't required, and this test serves as documentation
bd_set "a" "this should come first"
assertEquals "a=\"this should come first\" foo=\"value with spaces\" test=different-foo" "$(log_build_data)"
meta_set "a" "this should come first"
assertEquals "a=\"this should come first\" foo=\"value with spaces\" test=different-foo" "$(log_meta_data)"
# dates generated by running `nowms; sleep 10; nowms`
meta_time "time" "1545178120033" "1545178130043"
assertEquals "10.010" "$(meta_get time)"
# dates generated by running `nowms; sleep 1; nowms`
meta_time "time" "1545178503025" "1545178504027"
assertEquals "1.002" "$(meta_get time)"
# dates generated by running `nowms; sleep 30; nowms`
meta_time "time" "1545178521204" "1545178551206"
assertEquals "30.002" "$(meta_get time)"
}
testBuildDataPreviousBuild() {
local cache_dir=$(mktemp -d)
# the first time, there will be no previous build file
meta_create "$cache_dir"
assertContains "nodejs" "$BUILD_DATA_FILE"
assertContains "nodejs-prev" "$PREVIOUS_BUILD_DATA_FILE"
assertFileExists "$BUILD_DATA_FILE"
# set a value in the build data file
meta_set "test" "foo"
assertFileContains "test=foo" "$BUILD_DATA_FILE"
assertFileDoesNotExist "$PREVIOUS_BUILD_DATA_FILE"
assertEquals "$(meta_get test)" "foo"
assertEquals "$(meta_prev_get test)" ""
# the second time this is called (cache restored)
# there will be a previous build file
meta_create "$cache_dir"
assertFileExists "$BUILD_DATA_FILE"
assertFileExists "$PREVIOUS_BUILD_DATA_FILE"
# the data stored in the previous build should now be in the second file
assertFileNotContains "test=foo" "$BUILD_DATA_FILE"
assertFileContains "test=foo" "$PREVIOUS_BUILD_DATA_FILE"
assertEquals "$(meta_get test)" ""
assertEquals "$(meta_prev_get test)" "foo"
meta_set "test" "bar"
# doing it once more does not result in an error
meta_create "$cache_dir"
assertFileExists "$BUILD_DATA_FILE"
assertFileExists "$PREVIOUS_BUILD_DATA_FILE"
assertEquals "$(meta_prev_get test)" "bar"
assertEquals "$(meta_get test)" ""
}
testWebConcurrencyProfileScript() {
......@@ -210,15 +280,103 @@ testWebConcurrencyProfileScript() {
assertEquals "1" "$(calculate_concurrency 512 1)"
}
isUUID() {
if [[ ${1//-/} =~ ^[[:xdigit:]]{32}$ ]]; then
echo true
else
echo false
fi
}
testUUID() {
local first second
first=$(uuid)
second=$(uuid)
assertNotEquals "$first" "$second"
assertEquals "true" "$(isUUID "$first")"
assertEquals "true" "$(isUUID "$second")"
}
testUUIDFallback() {
local first second
first=$(uuid_fallback)
second=$(uuid_fallback)
assertNotEquals "$first" "$second"
assertEquals "true" "$(isUUID "$first")"
assertEquals "true" "$(isUUID "$second")"
}
testHasScript() {
local file="$(pwd)/test/fixtures/has-script-fixtures/package.json"
assertEquals "true" "$(has_script "$file" "build")"
assertEquals "true" "$(has_script "$file" "heroku-postbuild")"
assertEquals "false" "$(has_script "$file" "postinstall")"
assertEquals "true" "$(has_script "$file" "random-script-name")"
}
testExperiments() {
local schema="$(pwd)/test/unit-fixtures/experiments/experiments-v1"
local schema_next="$(pwd)/test/unit-fixtures/experiments/experiments-v1-next"
local schema_v2="$(pwd)/test/unit-fixtures/experiments/experiments-v2"
local cache_dir=$(mktemp -d)
local val
experiments_init "nodejs" "$cache_dir" "$schema"
# these should always be the same
assertEquals "true" "$(experiments_get "all-on")"
assertEquals "false" "$(experiments_get "all-off")"
# this will change, but stay the same between runs
val="$(experiments_get "ab-test")"
# pretend this is the next time this build is run
experiments_init "nodejs" "$cache_dir" "$schema"
# these should always be the same
assertEquals "true" "$(experiments_get "all-on")"
assertEquals "false" "$(experiments_get "all-off")"
# val should be the same as it was before
assertEquals "$val" "$(experiments_get "ab-test")"
# now we add a new feature to the schema
experiments_init "nodejs" "$cache_dir" "$schema_next"
assertEquals "true" "$(experiments_get "all-on")"
assertEquals "false" "$(experiments_get "all-off")"
assertEquals "$val" "$(experiments_get "ab-test")"
assertEquals "true" "$(experiments_get "new-always-on")"
# reset the schema
experiments_init "nodejs" "$cache_dir" "$schema_v2"
assertNotNull "$(experiments_get "new-feature")"
assertNull "$(experiments_get "all-on")"
assertNull "$(experiments_get "all-off")"
assertNull "$(experiments_get "ab-test")"
assertNull "$(experiments_get "new-always-on")"
}
BP_DIR="$(pwd)"
# mocks
source "$(pwd)"/test/mocks/stdlib.sh
# the modules to be tested
source "$(pwd)"/lib/uuid.sh
source "$(pwd)"/lib/environment.sh
source "$(pwd)"/lib/json.sh
source "$(pwd)"/lib/json.sh
source "$(pwd)"/lib/monitor.sh
source "$(pwd)"/lib/output.sh
source "$(pwd)"/lib/kvstore.sh
source "$(pwd)"/lib/build-data.sh
source "$(pwd)"/lib/experiments.sh
source "$(pwd)"/lib/metadata.sh
source "$(pwd)"/profile/WEB_CONCURRENCY.sh
# testing utils
source "$(pwd)"/test/utils
# import the testing framework
source "$(pwd)"/test/shunit2
#version=1
ab-test=50
all-on=100
all-off=0
#version=1
ab-test=50
all-on=100
all-off=0
new-always-on=100
a=b
b=c
\ No newline at end of file