Commit db0e7358 authored by Unknown

Merge remote-tracking branch 'upstream/master'

parents e87014e5 76ba1980
-language: bash
+language: go
+go:
+- "1.12"
sudo: required
services:
- docker
env:
+- TEST=shellcheck
- TEST=heroku-18 STACK=heroku-18
- TEST=heroku-16 STACK=heroku-16
- TEST=cedar-14 STACK=cedar-14
- TEST=hatchet
- TEST=unit
+- TEST=test-binary GO111MODULE=on
install:
- if [[ -n $STACK ]]; then
    docker pull "heroku/${STACK/-/:}";
@@ -2,6 +2,64 @@
## master
## v148 (2019-05-02)
- Dark-launch new semver matching logic for node binaries (#663)
## v147 (2019-05-01)
- Dark-launch new semver matching logic for yarn binaries (#661)
- Add node 12.x as a supported version for Node Metrics beta (#662)
## v146 (2019-04-25)
- Deprecate io.js as an alternative runtime (#658)
- Prototyping new version resolution approach to replace Nodebin (#649 - #657)
## v145 (2019-04-16)
- Separate prebuild step in log output (#646)
- Clean up script metrics and logging (#647)
## v144 (2019-04-08)
- Remove temporary warning about "run build" change (#644)
## v143 (2019-03-28)
- Internal logging changes (#637, #631, #630)
## v142 (2019-03-11)
- Add temporary warning about "run build" when the build fails as well (#639)
## v141 (2019-03-11)
- Add temporary warning about "run build" change to log output (#636)
## v140 (2019-03-11)
- Run the build script by default (#628)
## v139 (2019-03-04)
- Make breaking change warning header brighter (#627)
## v138 (2019-02-20)
- Add new Build header (#625)
- Fix yarn run error when script is empty string (#624)
## v137 (2019-02-14)
- Internal logging changes (#620, #618, #621)
- Detect build scripts even when they are empty (#617)
## v136 (2019-02-09)
- Add warning for the upcoming run build change (#616)
## v135 (2019-02-06)
- Fix bug where failing builds on CI would not fail CI (#613)
- Internal logging changes (#596, #600)
## v134 (2018-12-20)
- Internal changes (#593, #591)
- Handle `$MEMORY_AVAILABLE` when `memory.limit_in_bytes` is nonsensically large (#531)
## v133 (2018-11-28)
- Add warning for flatmap-stream 404 failure (#590)
## v132 (2018-11-12)
- Quietly add new build script behavior behind a flag (#584, #585)
@@ -66,6 +124,7 @@
## v119 (2018-02-28)
- Install and prune devDependencies by default (#519)
+- [Breaking] Stop setting the env var `NPM_CONFIG_PRODUCTION=true` by default
## v118 (2018-02-02)
@@ -86,3 +86,8 @@ make test-heroku-16
The tests are run via the vendored
[shunit2](https://github.com/kward/shunit2)
test framework.
## Updating go binaries

If you would like to develop and update the go binaries, you will need to install
[go 1.12](https://golang.org/doc/install#install) and [upx](https://upx.github.io/).
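A minimal sketch of that workflow, using the Makefile targets added in this commit (it assumes `go` and `upx` are already on your PATH):

```bash
# compile the resolve-version binaries for both platforms
make build

# compile and then compress the binaries with upx, for release
make build-production

# run the go integration tests
make test-binary
```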
@@ -17,7 +17,7 @@ error() {
  exit 1
}

-if [ -f $1/package.json ]; then
+if [ -f "$1/package.json" ]; then
  echo 'Node.js'
  exit 0
fi
@@ -59,7 +59,7 @@ If you are trying to deploy a Node.js application, ensure that this
file is present at the top level directory. This directory has the
following files:

-$(ls -1p $1)
+$(ls -1p "$1")

If you are trying to deploy an application written in another
language, you need to change the list of buildpacks set on your
#!/usr/bin/env bash
+# bin/test-compile <build-dir> <cache-dir> <env-dir>

+### Configure environment
+set -o errexit    # always exit on error
+set -o pipefail   # don't ignore exit codes when piping output

+### Configure directories
-BP_DIR=$(cd "$(dirname ${0:-})"; cd ..; pwd)
+BP_DIR=$(cd "$(dirname "${0:-}")" || exit; cd ..; pwd)

+### Load dependencies
+# shellcheck source=lib/environment.sh
-source $BP_DIR/lib/environment.sh
+source "$BP_DIR/lib/environment.sh"

+### Set up test Node environment
export NPM_CONFIG_PRODUCTION=${NPM_CONFIG_PRODUCTION:-false}
export NODE_ENV=${NODE_ENV:-test}

+### Compile the app
"$BP_DIR/bin/compile" "$1" "$2" "$3"

write_ci_profile "$BP_DIR" "$1"
+#!/usr/bin/env bash

export PATH="$HOME/.heroku/node/bin:$HOME/.heroku/yarn/bin:$PATH:$HOME/bin:$HOME/node_modules/.bin"
export NODE_HOME="$HOME/.heroku/node"
export NODE_ENV=${NODE_ENV:-test}
package main
import (
"encoding/xml"
"errors"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
"regexp"
"runtime"
"sort"
"strings"
"time"
"github.com/Masterminds/semver"
)
type result struct {
Name string `xml:"Name"`
KeyCount int `xml:"KeyCount"`
MaxKeys int `xml:"MaxKeys"`
IsTruncated bool `xml:"IsTruncated"`
ContinuationToken string `xml:"ContinuationToken"`
NextContinuationToken string `xml:"NextContinuationToken"`
Prefix string `xml:"Prefix"`
Contents []s3Object `xml:"Contents"`
}
type s3Object struct {
Key string `xml:"Key"`
LastModified time.Time `xml:"LastModified"`
ETag string `xml:"ETag"`
Size int `xml:"Size"`
StorageClass string `xml:"StorageClass"`
}
type release struct {
binary string
stage string
platform string
url string
version *semver.Version
}
type matchResult struct {
versionRequirement string
release release
matched bool
}
func main() {
if len(os.Args) < 3 {
printUsage()
os.Exit(0)
}
binary := os.Args[1]
versionRequirement := os.Args[2]
if binary == "node" {
objects, err := listS3Objects("heroku-nodebin", "node")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
result, err := resolveNode(objects, getPlatform(), versionRequirement)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
if result.matched {
fmt.Printf("%s %s\n", result.release.version.String(), result.release.url)
} else {
fmt.Println("No result")
os.Exit(1)
}
} else if binary == "yarn" {
objects, err := listS3Objects("heroku-nodebin", "yarn")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
result, err := resolveYarn(objects, versionRequirement)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
if result.matched {
fmt.Printf("%s %s\n", result.release.version.String(), result.release.url)
} else {
fmt.Println("No result")
os.Exit(1)
}
}
}
func printUsage() {
fmt.Println("resolve-version binary version-requirement")
}
func getPlatform() string {
if runtime.GOOS == "darwin" {
return "darwin-x64"
}
return "linux-x64"
}
func resolveNode(objects []s3Object, platform string, versionRequirement string) (matchResult, error) {
releases := []release{}
staging := []release{}
for _, obj := range objects {
release, err := parseObject(obj.Key)
if err != nil {
continue
}
// ignore any releases that are not for the given platform
if release.platform != platform {
continue
}
if release.stage == "release" {
releases = append(releases, release)
} else {
staging = append(staging, release)
}
}
result, err := matchReleaseSemver(releases, versionRequirement)
if err != nil {
return matchResult{}, err
}
// In order to accommodate integrated testing of staged Node binaries before they are
// released broadly, there is a special case where:
//
// - if there is no match to a Node binary AND
// - an exact version of a binary in `node/staging` is present
//
// the staging binary is used
if !result.matched {
stagingResult := matchReleaseExact(staging, versionRequirement)
if stagingResult.matched {
return stagingResult, nil
}
}
return result, nil
}
func resolveYarn(objects []s3Object, versionRequirement string) (matchResult, error) {
releases := []release{}
for _, obj := range objects {
release, err := parseObject(obj.Key)
if err != nil {
continue
}
releases = append(releases, release)
}
return matchReleaseSemver(releases, versionRequirement)
}
func matchReleaseSemver(releases []release, versionRequirement string) (matchResult, error) {
rewrittenRequirement := rewriteRange(versionRequirement)
constraints, err := semver.NewConstraint(rewrittenRequirement)
if err != nil {
return matchResult{}, err
}
filtered := []release{}
for _, release := range releases {
if constraints.Check(release.version) {
filtered = append(filtered, release)
}
}
versions := make([]*semver.Version, len(filtered))
for i, rel := range filtered {
versions[i] = rel.version
}
coll := semver.Collection(versions)
sort.Sort(coll)
if len(coll) == 0 {
return matchResult{
versionRequirement: versionRequirement,
release: release{},
matched: false,
}, nil
}
resolvedVersion := coll[len(coll)-1]
for _, rel := range filtered {
if rel.version.Equal(resolvedVersion) {
return matchResult{
versionRequirement: versionRequirement,
release: rel,
matched: true,
}, nil
}
}
return matchResult{}, errors.New("Unknown error")
}
func matchReleaseExact(releases []release, version string) matchResult {
for _, release := range releases {
if release.version.String() == version {
return matchResult{
versionRequirement: version,
release: release,
matched: true,
}
}
}
return matchResult{
versionRequirement: version,
release: release{},
matched: false,
}
}
// Parses an S3 key into a struct of information about that release
// Example input: node/release/linux-x64/node-v6.2.2-linux-x64.tar.gz
func parseObject(key string) (release, error) {
nodeRegex := regexp.MustCompile("node\\/([^\\/]+)\\/([^\\/]+)\\/node-v([0-9]+\\.[0-9]+\\.[0-9]+)-([^.]*)(.*)\\.tar\\.gz")
yarnRegex := regexp.MustCompile("yarn\\/([^\\/]+)\\/yarn-v([0-9]+\\.[0-9]+\\.[0-9]+)\\.tar\\.gz")
if nodeRegex.MatchString(key) {
match := nodeRegex.FindStringSubmatch(key)
version, err := semver.NewVersion(match[3])
if err != nil {
return release{}, fmt.Errorf("Failed to parse version as semver:%s\n%s", match[3], err.Error())
}
return release{
binary: "node",
stage: match[1],
platform: match[2],
version: version,
url: fmt.Sprintf("https://s3.amazonaws.com/%s/node/%s/%s/node-v%s-%s.tar.gz", "heroku-nodebin", match[1], match[2], match[3], match[2]),
}, nil
}
if yarnRegex.MatchString(key) {
match := yarnRegex.FindStringSubmatch(key)
version, err := semver.NewVersion(match[2])
if err != nil {
return release{}, errors.New("Failed to parse version as semver")
}
return release{
binary: "yarn",
stage: match[1],
platform: "",
url: fmt.Sprintf("https://s3.amazonaws.com/heroku-nodebin/yarn/release/yarn-v%s.tar.gz", version),
version: version,
}, nil
}
return release{}, fmt.Errorf("Failed to parse key: %s", key)
}
// Wrapper around the S3 API for listing objects
// This maps directly to the API and parses the XML response but will not handle
// paging and offsets automatically
func fetchS3Result(bucketName string, options map[string]string) (result, error) {
var result result
v := url.Values{}
v.Set("list-type", "2")
for key, val := range options {
v.Set(key, val)
}
url := fmt.Sprintf("https://%s.s3.amazonaws.com?%s", bucketName, v.Encode())
resp, err := http.Get(url)
if err != nil {
return result, err
}
// close the response body once we're done reading it, so the connection is not leaked
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return result, err
}
return result, xml.Unmarshal(body, &result)
}
// Query the S3 API for a list of all the objects in an S3 bucket with a
// given prefix. This will handle the inherent 1000 item limit and paging
// for you
func listS3Objects(bucketName string, prefix string) ([]s3Object, error) {
var out = []s3Object{}
var options = map[string]string{"prefix": prefix}
for {
result, err := fetchS3Result(bucketName, options)
if err != nil {
return nil, err
}
out = append(out, result.Contents...)
if !result.IsTruncated {
break
}
options["continuation-token"] = result.NextContinuationToken
}
return out, nil
}
// regex matching the semver version definitions
// Ex:
// v1.0.0
// 9
// 8.x
const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` +
`(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
`(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`
// regex matching the semver operators
const ops string = `=<|~>|!=|>|<|>=|=>|<=|\^|=|~`
// Masterminds/semver does not support constraints like: `>1 <2`, preferring
// `>1, <2` with a comma separator. This catches this particular case and
// rewrites it
func rewriteRange(c string) string {
constraintRangeRegex := regexp.MustCompile(fmt.Sprintf(
`^\s*(%s)(\s*%s)\s*(%s)(\s*%s)$`,
ops, cvRegex, ops, cvRegex,
))
ors := strings.Split(c, "||")
out := make([]string, len(ors))
for i, v := range ors {
m := constraintRangeRegex.FindStringSubmatch(v)
if m != nil {
out[i] = fmt.Sprintf("%s%s, %s%s", m[1], m[2], m[12], m[13])
} else {
out[i] = v
}
}
return strings.Join(out, `||`)
}
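For reference, a sketch of how the compiled binary behaves from the shell. The output format ("version url" on success, "No result" with a non-zero exit otherwise) comes from main above; the concrete versions and URLs here are illustrative, not pinned to real S3 contents:

```bash
# resolve the newest node release matching a semver range
$ ./vendor/resolve-version-linux node "10.x"
10.15.3 https://s3.amazonaws.com/heroku-nodebin/node/release/linux-x64/node-v10.15.3-linux-x64.tar.gz

# space-separated ranges are rewritten for Masterminds/semver by rewriteRange
$ ./vendor/resolve-version-linux node ">10 <11"
10.15.3 https://s3.amazonaws.com/heroku-nodebin/node/release/linux-x64/node-v10.15.3-linux-x64.tar.gz

# an unmatched requirement prints "No result" and exits 1
$ ./vendor/resolve-version-linux yarn "99.x"
No result
```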
// +build integration
package main
import (
"regexp"
"testing"
"github.com/stretchr/testify/assert"
)
func TestListS3Objects(t *testing.T) {
// Node
objects, err := listS3Objects("heroku-nodebin", "node")
assert.Nil(t, err)
assert.NotEmpty(t, objects)
// every returned result starts with "node"
for _, obj := range objects {
assert.Regexp(t, regexp.MustCompile("^node"), obj.Key)
}
// every node object must parse as a valid release
for _, obj := range objects {
release, err := parseObject(obj.Key)
assert.Nil(t, err)
assert.Regexp(t, regexp.MustCompile("https:\\/\\/s3.amazonaws.com\\/heroku-nodebin"), release.url)
assert.Regexp(t, regexp.MustCompile("[0-9]+.[0-9]+.[0-9]+"), release.version.String())
}
// Yarn
objects, err = listS3Objects("heroku-nodebin", "yarn")
assert.Nil(t, err)
assert.NotEmpty(t, objects)
// every returned result starts with "yarn"
for _, obj := range objects {
assert.Regexp(t, regexp.MustCompile("^yarn"), obj.Key)
}
// every yarn object must parse as a valid release
for _, obj := range objects {
release, err := parseObject(obj.Key)
assert.Nil(t, err)
assert.Regexp(t, regexp.MustCompile("https:\\/\\/s3.amazonaws.com\\/heroku-nodebin"), release.url)
assert.Regexp(t, regexp.MustCompile("[0-9]+.[0-9]+.[0-9]+"), release.version.String())
}
}
@@ -2,7 +2,7 @@
[ "$CI" != "true" ] && echo "Not running on CI!" && exit 1

-git config --global user.email ${HEROKU_API_USER:-"buildpack@example.com"}
+git config --global user.email "${HEROKU_API_USER:-"buildpack@example.com"}"
git config --global user.name 'BuildpackTester'

cat <<EOF >> ~/.ssh/config
@@ -26,14 +26,16 @@ if [ -z "$HEROKU_API_KEY" ]; then
fi

if [ -n "$CIRCLE_BRANCH" ]; then
-  export HATCHET_BUILDPACK_BRANCH="$CIRCLE_BRANCH"
+  HATCHET_BUILDPACK_BRANCH="$CIRCLE_BRANCH"
elif [ -n "$TRAVIS_PULL_REQUEST_BRANCH" ]; then
  export IS_RUNNING_ON_TRAVIS=true
-  export HATCHET_BUILDPACK_BRANCH="$TRAVIS_PULL_REQUEST_BRANCH"
+  HATCHET_BUILDPACK_BRANCH="$TRAVIS_PULL_REQUEST_BRANCH"
else
-  export HATCHET_BUILDPACK_BRANCH=$(git name-rev HEAD 2> /dev/null | sed 's#HEAD\ \(.*\)#\1#' | sed 's#tags\/##')
+  HATCHET_BUILDPACK_BRANCH=$(git name-rev HEAD 2> /dev/null | sed 's#HEAD\ \(.*\)#\1#' | sed 's#tags\/##')
fi
+export HATCHET_BUILDPACK_BRANCH

gem install bundler
bundle install
@@ -7,7 +7,7 @@ BP_NAME=${1:-"heroku/nodejs"}
curVersion=$(heroku buildpacks:versions "$BP_NAME" | awk 'FNR == 3 { print $1 }')
newVersion="v$((curVersion + 1))"

-read -p "Deploy as version: $newVersion [y/n]? " choice
+read -r -p "Deploy as version: $newVersion [y/n]? " choice
case "$choice" in
  y|Y ) echo "";;
  n|N ) exit 0;;
module github.com/heroku/heroku-buildpack-nodejs
go 1.12
require (
github.com/Masterminds/semver v1.4.2
github.com/stretchr/testify v1.3.0
)
github.com/Masterminds/semver v1.4.2 h1:WBLTQ37jOCzSLtXNdoo8bNM8876KhNqOKvrlGITgsTc=
github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+#!/usr/bin/env bash
+
+RESOLVE="$BP_DIR/vendor/resolve-version-$(get_os)"

install_yarn() {
  local dir="$1"
  local version=${2:-1.x}
-  local number
-  local url
+  local platform="$3"
+  local number url code nodebin_result resolve_result

  echo "Resolving yarn version $version..."
-  if ! read number url < <(curl --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/yarn/$platform/latest.txt"); then
-    fail_bin_install yarn $version;
+  nodebin_result=$(curl --fail --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/yarn/$platform/latest.txt" || echo "failed")
+  resolve_result=$($RESOLVE yarn "$version" || echo "failed")
+
+  if [[ "$nodebin_result" == "failed" ]]; then
+    fail_bin_install yarn "$version" "$platform"
+  fi
+
+  read -r number url < <(echo "$nodebin_result")
+
+  # log out whether the new logic matches the old logic
+  if [[ "$nodebin_result" != "$resolve_result" ]]; then
+    meta_set "resolve-matches-nodebin-yarn" "false"
+  else
+    meta_set "resolve-matches-nodebin-yarn" "true"
+  fi
+
+  # log out when the new logic fails
+  if [[ "$resolve_result" == "failed" ]]; then
+    meta_set "resolve-failed-yarn" "true"
  fi

  echo "Downloading and installing yarn ($number)..."
-  local code=$(curl "$url" -L --silent --fail --retry 5 --retry-max-time 15 -o /tmp/yarn.tar.gz --write-out "%{http_code}")
+  code=$(curl "$url" -L --silent --fail --retry 5 --retry-max-time 15 -o /tmp/yarn.tar.gz --write-out "%{http_code}")
  if [ "$code" != "200" ]; then
    echo "Unable to download yarn: $code" && false
  fi
-  rm -rf $dir
+  rm -rf "$dir"
  mkdir -p "$dir"
  # https://github.com/yarnpkg/yarn/issues/770
  if tar --version | grep -q 'gnu'; then
@@ -22,54 +43,58 @@ install_yarn() {
  else
    tar xzf /tmp/yarn.tar.gz -C "$dir" --strip 1
  fi
-  chmod +x $dir/bin/*
+  chmod +x "$dir"/bin/*
  echo "Installed yarn $(yarn --version)"
}
install_nodejs() {
  local version=${1:-10.x}
  local dir="${2:?}"
+  local platform="$3"
+  local code os cpu nodebin_result resolve_result
+
+  os=$(get_os)
+  cpu=$(get_cpu)

  echo "Resolving node version $version..."
-  if ! read number url < <(curl --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/node/$platform/latest.txt"); then
-    fail_bin_install node $version;
-  fi
+  nodebin_result=$(curl --silent --fail --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/node/$platform/latest.txt" || echo "failed")
+  resolve_result=$($RESOLVE node "$version" || echo "failed")
+
+  read -r number url < <(echo "$nodebin_result")
+
+  if [[ "$nodebin_result" == "failed" ]]; then
+    fail_bin_install node "$version" "$platform"
+  fi
+
+  # log out whether the new logic matches the old logic
+  if [[ "$nodebin_result" != "$resolve_result" ]]; then
+    meta_set "resolve-matches-nodebin-node" "false"
+  else
+    meta_set "resolve-matches-nodebin-node" "true"
+  fi
+
+  # log out when the new logic fails
+  if [[ "$resolve_result" == "failed" ]]; then
+    meta_set "resolve-failed-node" "true"
+  fi

  echo "Downloading and installing node $number..."
-  local code=$(curl "$url" -L --silent --fail --retry 5 --retry-max-time 15 -o /tmp/node.tar.gz --write-out "%{http_code}")
+  code=$(curl "$url" -L --silent --fail --retry 5 --retry-max-time 15 -o /tmp/node.tar.gz --write-out "%{http_code}")
  if [ "$code" != "200" ]; then
    echo "Unable to download node: $code" && false
  fi
  tar xzf /tmp/node.tar.gz -C /tmp
-  rm -rf "$dir"/*
-  mv /tmp/node-v$number-$os-$cpu/* $dir
-  chmod +x $dir/bin/*
+  rm -rf "${dir:?}"/*
+  mv /tmp/node-v"$number"-"$os"-"$cpu"/* "$dir"
+  chmod +x "$dir"/bin/*
}

-install_iojs() {
-  local version="$1"
-  local dir="$2"
-
-  echo "Resolving iojs version ${version:-(latest stable)}..."
-  if ! read number url < <(curl --silent --get --retry 5 --retry-max-time 15 --data-urlencode "range=$version" "https://nodebin.jxltom.com/v1/iojs/$platform/latest.txt"); then
-    fail_bin_install iojs $version;
-  fi
-
-  echo "Downloading and installing iojs $number..."
-  local code=$(curl "$url" --silent --fail --retry 5 --retry-max-time 15 -o /tmp/iojs.tar.gz --write-out "%{http_code}")
-  if [ "$code" != "200" ]; then
-    echo "Unable to download iojs: $code" && false
-  fi
-  tar xzf /tmp/iojs.tar.gz -C /tmp
-  mv /tmp/iojs-v$number-$os-$cpu/* $dir
-  chmod +x $dir/bin/*
-}

install_npm() {
+  local npm_version
  local version="$1"
  local dir="$2"
  local npm_lock="$3"
-  local npm_version="$(npm --version)"
+  npm_version="$(npm --version)"

  # If the user has not specified a version of npm, but has an npm lockfile
  # upgrade them to npm 5.x if a suitable version was not installed with Node
# variable shared by this whole module
BUILD_DATA_FILE=""
bd_create() {
local cache_dir="$1"
BUILD_DATA_FILE="$cache_dir/build-data/node"
kv_create $BUILD_DATA_FILE
}
bd_get() {
kv_get $BUILD_DATA_FILE "$1"
}
bd_set() {
kv_set $BUILD_DATA_FILE "$1" "$2"
}
log_build_data() {
# print all values on one line in logfmt format
# https://brandur.org/logfmt
echo $(kv_list $BUILD_DATA_FILE)
}
#!/usr/bin/env bash
log_initial_state() {
meta_set "buildpack" "nodejs"
if "$YARN"; then
meta_set "node-package-manager" "yarn"
meta_set "has-node-lock-file" "true"
else
meta_set "node-package-manager" "npm"
meta_set "has-node-lock-file" "$NPM_LOCK"
fi
meta_set "stack" "$STACK"
}
generate_uuids() {
# generate a unique id for each build
meta_set "build-uuid" "$(uuid)"
# propagate an app-uuid forward unless the cache is cleared
if [[ -n "$(meta_prev_get "app-uuid")" ]]; then
meta_set "app-uuid" "$(meta_prev_get "app-uuid")"
else
meta_set "app-uuid" "$(uuid)"
fi
}
log_build_script_opt_in() {
local opted_in="$1"
local build_dir="$2"
local has_build_script has_heroku_build_script
has_build_script=$(read_json "$build_dir/package.json" ".scripts.build")
has_heroku_build_script=$(read_json "$build_dir/package.json" ".scripts[\"heroku-postbuild\"]")
# if this app will be affected by the change
if [[ -z "$has_heroku_build_script" ]] && [[ -n "$has_build_script" ]]; then
mcount "affected-by-build-change"
if [[ "$opted_in" = "true" ]]; then
mcount "affected-by-build-change-opted-in"
meta_set "affected-but-opted-in" "true"
else
meta_set "affected-but-opted-in" "false"
fi
fi
if [[ "$opted_in" = true ]]; then
meta_set "build-script-opt-in" "true"
else
meta_set "build-script-opt-in" "false"
fi
}
\ No newline at end of file
-source $BP_DIR/lib/binaries.sh
+#!/usr/bin/env bash

create_signature() {
  echo "v2; ${STACK}; $(node --version); $(npm --version); $(yarn --version 2>/dev/null || true); ${PREBUILD}"
}

save_signature() {
-  create_signature > $CACHE_DIR/node/signature
+  local cache_dir="$1"
+  create_signature > "$cache_dir/node/signature"
}

load_signature() {
+  local cache_dir="$1"
-  if test -f $CACHE_DIR/node/signature; then
-    cat $CACHE_DIR/node/signature
+  if test -f "$cache_dir/node/signature"; then
+    cat "$cache_dir/node/signature"
  else
    echo ""
  fi
}

get_cache_status() {
+  local cache_dir="$1"
  if ! ${NODE_MODULES_CACHE:-true}; then
    echo "disabled"
-  elif ! test -d "${CACHE_DIR}/node/"; then
+  elif ! test -d "$cache_dir/node/"; then
    echo "not-found"
-  elif [ "$(create_signature)" != "$(load_signature)" ]; then
+  elif [ "$(create_signature)" != "$(load_signature "$cache_dir")" ]; then
    echo "new-signature"
  else
    echo "valid"
@@ -29,8 +32,10 @@ get_cache_status() {
}

get_cache_directories() {
-  local dirs1=$(read_json "$BUILD_DIR/package.json" ".cacheDirectories | .[]?")
-  local dirs2=$(read_json "$BUILD_DIR/package.json" ".cache_directories | .[]?")
+  local build_dir="$1"
+  local dirs1 dirs2
+  dirs1=$(read_json "$build_dir/package.json" ".cacheDirectories | .[]?")
+  dirs2=$(read_json "$build_dir/package.json" ".cache_directories | .[]?")

  if [ -n "$dirs1" ]; then
    echo "$dirs1"
@@ -61,9 +66,11 @@ restore_default_cache_directories() {
}

restore_custom_cache_directories() {
+  local cache_directories
  local build_dir=${1:-}
  local cache_dir=${2:-}
-  local cache_directories=("${@:3}")
+  # Parse the input string with multiple lines: "a\nb\nc" into an array
+  mapfile -t cache_directories <<< "$3"

  echo "Loading ${#cache_directories[@]} from cacheDirectories (package.json):"
@@ -83,9 +90,10 @@ restore_custom_cache_directories() {
}

clear_cache() {
-  rm -rf $CACHE_DIR/node
-  mkdir -p $CACHE_DIR/node
-  mkdir -p $CACHE_DIR/node/cache
+  local cache_dir="$1"
+  rm -rf "$cache_dir/node"
+  mkdir -p "$cache_dir/node"
+  mkdir -p "$cache_dir/node/cache"
}

save_default_cache_directories() {
@@ -106,16 +114,21 @@ save_default_cache_directories() {
  # bower_components
  if [[ -e "$build_dir/bower_components" ]]; then
    mcount "cache.saved-bower-components"
+    meta_set "cached-bower-components" "true"
    echo "- bower_components"
    mkdir -p "$cache_dir/node/cache/bower_components"
    cp -a "$build_dir/bower_components" "$(dirname "$cache_dir/node/cache/bower_components")"
  fi
+
+  meta_set "node-custom-cache-dirs" "false"
}

save_custom_cache_directories() {
+  local cache_directories
  local build_dir=${1:-}
  local cache_dir=${2:-}
-  local cache_directories=("${@:3}")
+  # Parse the input string with multiple lines: "a\nb\nc" into an array
+  mapfile -t cache_directories <<< "$3"

  echo "Saving ${#cache_directories[@]} cacheDirectories (package.json):"
@@ -128,4 +141,6 @@ save_custom_cache_directories() {
      echo "- $cachepath (nothing to cache)"
    fi
  done
+
+  meta_set "node-custom-cache-dirs" "true"
}
+#!/usr/bin/env bash

get_os() {
-  uname | tr A-Z a-z
+  uname | tr '[:upper:]' '[:lower:]'
}

get_cpu() {
@@ -10,10 +12,11 @@ get_cpu() {
  fi
}

-os=$(get_os)
-cpu=$(get_cpu)
-platform="$os-$cpu"
-export JQ="$BP_DIR/vendor/jq-$os"
+get_platform() {
+  os=$(get_os)
+  cpu=$(get_cpu)
+  echo "$os-$cpu"
+}

create_default_env() {
  export NPM_CONFIG_LOGLEVEL=${NPM_CONFIG_LOGLEVEL:-error}
@@ -54,13 +57,15 @@ export_env_dir() {
  if [ -d "$env_dir" ]; then
    local whitelist_regex=${2:-''}
    local blacklist_regex=${3:-'^(PATH|GIT_DIR|CPATH|CPPATH|LD_PRELOAD|LIBRARY_PATH|LANG|BUILD_DIR)$'}
+    # shellcheck disable=SC2164
    pushd "$env_dir" >/dev/null
    for e in *; do
      [ -e "$e" ] || continue
      echo "$e" | grep -E "$whitelist_regex" | grep -qvE "$blacklist_regex" &&
-      export "$e=$(cat $e)"
+      export "$e=$(cat "$e")"
      :
    done
+    # shellcheck disable=SC2164
    popd >/dev/null
  fi
}
@@ -68,15 +73,15 @@ export_env_dir() {

write_profile() {
  local bp_dir="$1"
  local build_dir="$2"
-  mkdir -p $build_dir/.profile.d
-  cp $bp_dir/profile/* $build_dir/.profile.d/
+  mkdir -p "$build_dir/.profile.d"
+  cp "$bp_dir"/profile/* "$build_dir/.profile.d/"
}

write_ci_profile() {
  local bp_dir="$1"
  local build_dir="$2"
  write_profile "$1" "$2"
-  cp $bp_dir/ci-profile/* $build_dir/.profile.d/
+  cp "$bp_dir"/ci-profile/* "$build_dir/.profile.d/"
}

write_export() {
@@ -86,8 +91,8 @@ write_export() {
  # only write the export script if the buildpack directory is writable.
  # this may occur in situations outside of Heroku, such as running the
  # buildpacks locally.
-  if [ -w ${bp_dir} ]; then
-    echo "export PATH=\"$build_dir/.heroku/node/bin:$build_dir/.heroku/yarn/bin:\$PATH:$build_dir/node_modules/.bin\"" > $bp_dir/export
-    echo "export NODE_HOME=\"$build_dir/.heroku/node\"" >> $bp_dir/export
+  if [ -w "$bp_dir" ]; then
+    echo "export PATH=\"$build_dir/.heroku/node/bin:$build_dir/.heroku/yarn/bin:\$PATH:$build_dir/node_modules/.bin\"" > "$bp_dir/export"
+    echo "export NODE_HOME=\"$build_dir/.heroku/node\"" >> "$bp_dir/export"
  fi
}
#!/usr/bin/env bash
# This module is designed to be able to roll out experiments to a
# random segment of users for A/B testing. This takes as input a
# list of experiments along with % chance they will be enabled,
# decides which to enable, and persists these decisions into the
# application cache.
#
# This module takes in no outside data, so it is limited in its
# uses. While an experiment can be persisted between builds for the
# same app, it cannot be consistent for a given user / team. Even
# different PR apps will be decided independently.
#
# This means that this should not be used for changing the build
# behavior of the buildpack. Builds should always work consistently
# no matter what experiments are turned on or off.
#
# Where this module can be useful is when deciding between two
# identical behaviors that may have performance trade-offs, or
# testing the efficacy of different messaging.
#
# Examples:
# testing two different caching strategies against each other
# showing guidance on a particular type of failure
#
# It is expected that these experiments will be short-lived
#
# Schema
#
# This module expects a "schema" file as input. This is used to
# make sure that all current experiments are documented in one
# place. The file is a list of key=value pairs on individual
# lines.
#
# There is a special "#version" key that is expected that can be
# used to invalidate any existing experiments.
#
# The key is the name, and the value is an integer between 0 and
# 100 inclusive that represents the likelihood that the experiment
# will be turned on for any given app.
#
# Example:
# ```
# #version=1
# always-on=100 // this will always be turned on, not super useful
# ab-test=50 // this will be split 50/50
# small-test=5 // this will be turned on for 5% of apps
# ```
#
# See tests/unit-fixtures/experiments/experiments-v1 for an example
# variables shared by this whole module
EXPERIMENTS_DATA_FILE=""
experiments_init() {
local name="$1"
local cache_dir="$2"
local schema="$3"
local last_schema_version schema_version random odds
EXPERIMENTS_DATA_FILE="$cache_dir/experiments/$name"
last_schema_version="$(kv_get "$EXPERIMENTS_DATA_FILE" "#version")"
schema_version="$(kv_get "$schema" "#version")"
# If the schema has changed, blow away the current values
# and start fresh. This is essentially "wiping the slate clean"
# and no previous experiments will be enabled for anyone
#
# In the case that the schema version is the same, we keep
# all of the previously decided experiments (file is the same)
# and decide on any new ones
if [[ "$last_schema_version" != "$schema_version" ]]; then
kv_create "$EXPERIMENTS_DATA_FILE"
kv_clear "$EXPERIMENTS_DATA_FILE"
# save out the version we're using to generate this set of experiments
kv_set "$EXPERIMENTS_DATA_FILE" "#version" "$schema_version"
fi
# iterate through the schema and decide if each new experiment
# should be turned on or not
kv_keys "$schema" | tr ' ' '\n' | while read -r key; do
# skip the special version key
if [[ "$key" = "#version" ]]; then
continue
# skip any values that are already decided
elif [[ -n "$(kv_get "$EXPERIMENTS_DATA_FILE" "$key")" ]]; then
continue
else
# generate a random number between 0 and 99
random=$((RANDOM % 100))
# the value in the schema should be a number between 0 and 100 inclusive
odds=$(kv_get "$schema" "$key")
if [[ "$random" -lt "$odds" ]]; then
kv_set "$EXPERIMENTS_DATA_FILE" "$key" "true"
else
kv_set "$EXPERIMENTS_DATA_FILE" "$key" "false"
fi
fi
done
}
# Determine whether an experiment is enabled or disabled
# Must call experiments_init first
#
# Possible outputs: "true" "false" ""
experiments_get() {
kv_get "$EXPERIMENTS_DATA_FILE" "$1"
}
# Outputs a list of experiment names, one-per-line
experiments_list() {
kv_keys "$EXPERIMENTS_DATA_FILE"
}
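A hypothetical caller, assuming a schema file shaped like the example above and the usual $CACHE_DIR/$BP_DIR variables available in bin/compile (the schema path is illustrative):

```bash
# decide (or re-load) this app's experiments for the current schema version
experiments_init "node-buildpack" "$CACHE_DIR" "$BP_DIR/experiments/schema"

# experiments_get prints "true", "false", or "" for unknown keys
if [[ "$(experiments_get "ab-test")" == "true" ]]; then
  echo "taking the experimental code path"
fi

# one experiment name per line
experiments_list
```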
+#!/usr/bin/env bash
+
+JQ="$BP_DIR/vendor/jq-$(get_os)"

read_json() {
-  local file=$1
-  local key=$2
+  local file="$1"
+  local key="$2"

-  if test -f $file; then
-    cat $file | $JQ --raw-output "$key // \"\"" || return 1
+  if test -f "$file"; then
+    # shellcheck disable=SC2002
+    cat "$file" | $JQ --raw-output "$key // \"\"" || return 1
  else
    echo ""
  fi
}
has_script() {
local file="$1"
local key="$2"
if test -f "$file"; then
# shellcheck disable=SC2002
cat "$file" | $JQ ".[\"scripts\"] | has(\"$key\")"
else
echo "false"
fi
}
is_invalid_json_file() {
local file="$1"
# shellcheck disable=SC2002
if ! cat "$file" | $JQ "." 1>/dev/null; then
echo "true"
else
echo "false"
fi
}
\ No newline at end of file
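Illustrative calls against an app's package.json, assuming $BUILD_DIR points at the app being built:

```bash
# read_json prints "" when the key is absent
node_range=$(read_json "$BUILD_DIR/package.json" ".engines.node")

# has_script prints "true"/"false"
if [[ "$(has_script "$BUILD_DIR/package.json" "build")" == "true" ]]; then
  echo "package.json declares a build script"
fi

# is_invalid_json_file prints "true" when jq cannot parse the file
if [[ "$(is_invalid_json_file "$BUILD_DIR/package.json")" == "true" ]]; then
  echo "package.json is not valid JSON"
fi
```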
+#!/usr/bin/env bash

kv_create() {
  local f=$1
-  mkdir -p $(dirname $f)
-  touch $f
+  mkdir -p "$(dirname "$f")"
+  touch "$f"
}

kv_clear() {
  local f=$1
-  echo "" > $f
+  echo "" > "$f"
}

kv_set() {
  if [[ $# -eq 3 ]]; then
    local f=$1
    if [[ -f $f ]]; then
-      echo "$2=$3" >> $f
+      echo "$2=$3" >> "$f"
    fi
  fi
}

+# get the value, but don't unwrap quotes
kv_get() {
  if [[ $# -eq 2 ]]; then
    local f=$1
    if [[ -f $f ]]; then
-      grep "^$2=" $f | sed -e "s/^$2=//" | tail -n 1
+      grep "^$2=" "$f" | sed -e "s/^$2=//" | tail -n 1
    fi
  fi
}

+# get the value, but wrap it in quotes if it contains a space
kv_get_escaped() {
-  local value=$(kv_get $1 $2 $3)
+  local value
+  value=$(kv_get "$1" "$2")
  if [[ $value =~ [[:space:]]+ ]]; then
    echo "\"$value\""
  else
-    echo $value
+    echo "$value"
  fi
}
@@ -43,10 +45,18 @@ kv_keys() {
  local keys=()

  if [[ -f $f ]]; then
-    # get list of keys
-    while IFS="=" read -r key value; do
-      keys+=("$key")
-    done < $f
+    # Iterate over each line, splitting on the '=' character
+    #
+    # The || [[ -n "$key" ]] statement addresses an issue with reading the last line
+    # of a file when there is no newline at the end. This will not happen if the file
+    # is created with this module, but can happen if it is written by hand.
+    # See: https://stackoverflow.com/questions/12916352/shell-script-read-missing-last-line
+    while IFS="=" read -r key value || [[ -n "$key" ]]; do
+      # if there are any empty lines in the store, skip them
+      if [[ -n $key ]]; then
+        keys+=("$key")
+      fi
+    done < "$f"

    echo "${keys[@]}" | tr ' ' '\n' | sort -u
  fi
@@ -55,9 +65,9 @@ kv_keys() {

kv_list() {
  local f=$1
-  kv_keys $f | tr ' ' '\n' | while read -r key; do
+  kv_keys "$f" | tr ' ' '\n' | while read -r key; do
    if [[ -n $key ]]; then
-      echo "$key=$(kv_get_escaped $f $key)"
+      echo "$key=$(kv_get_escaped "$f" "$key")"
    fi
  done
}
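A small round-trip showing the store format (one key=value pair per line); the file path here is a throwaway, not one the buildpack uses:

```bash
store="$(mktemp -d)/store"
kv_create "$store"
kv_set "$store" "node-version" "10.15.3"
kv_set "$store" "failure" "exit status 1"   # a value containing spaces

kv_get "$store" "node-version"   # prints: 10.15.3
kv_list "$store"                 # prints sorted by key, quoting spaced values:
                                 #   failure="exit status 1"
                                 #   node-version=10.15.3
```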
#!/usr/bin/env bash
# variable shared by this whole module
BUILD_DATA_FILE=""
PREVIOUS_BUILD_DATA_FILE=""
meta_create() {
local cache_dir="$1"
BUILD_DATA_FILE="$cache_dir/build-data/nodejs"
PREVIOUS_BUILD_DATA_FILE="$cache_dir/build-data/nodejs-prev"
# if the file already exists because it's from the last build, save it
if [[ -f "$BUILD_DATA_FILE" ]]; then
cp "$BUILD_DATA_FILE" "$PREVIOUS_BUILD_DATA_FILE"
fi
kv_create "$BUILD_DATA_FILE"
# make sure this doesn't grow over time
kv_clear "$BUILD_DATA_FILE"
}
meta_get() {
kv_get "$BUILD_DATA_FILE" "$1"
}
meta_set() {
kv_set "$BUILD_DATA_FILE" "$1" "$2"
}
# similar to mtime from stdlib
meta_time() {
local key="$1"
local start="$2"
local end="${3:-$(nowms)}"
local time
time="$(echo "${start}" "${end}" | awk '{ printf "%.3f", ($2 - $1)/1000 }')"
kv_set "$BUILD_DATA_FILE" "$key" "$time"
}
# Retrieve a value from a previous build if it exists
# This is useful to give the user context about what changed if the
# build has failed. Ex:
# - changed stacks
# - deployed with a new major version of Node
# - etc
meta_prev_get() {
kv_get "$PREVIOUS_BUILD_DATA_FILE" "$1"
}
log_meta_data() {
# print all values on one line in logfmt format
# https://brandur.org/logfmt
# the echo call ensures that all values are printed on a single line
# shellcheck disable=SC2005 disable=SC2046
echo $(kv_list "$BUILD_DATA_FILE")
}
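A sketch of the intended lifecycle, assuming nowms from buildpack-stdlib and a $CACHE_DIR as in bin/compile:

```bash
# create (and rotate) the store early in the build
meta_create "$CACHE_DIR"
meta_set "node-package-manager" "npm"

start=$(nowms)
# ... do some work ...
meta_time "build-time" "$start"   # stores elapsed seconds, e.g. 1.234

# compare against the previous build, if one wrote metadata
if [[ "$(meta_prev_get "stack")" != "$STACK" ]]; then
  echo "stack changed since the last build"
fi

# one logfmt line, e.g.: node-package-manager=npm build-time=1.234
log_meta_data
```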
+#!/usr/bin/env bash

monitor_memory_usage() {
  local output_file="$1"
@@ -12,7 +13,7 @@ monitor_memory_usage() {
  pid=$!

  # if this build process is SIGTERM'd
-  trap "kill -TERM $pid" TERM
+  trap 'kill -TERM $pid' TERM

  # set the peak memory usage to 0 to start
  peak="0"
@@ -29,7 +30,7 @@ monitor_memory_usage() {
  done

  # ps gives us kb, let's convert to mb for convenience
-  echo "$(($peak / 1024))" > $output_file
+  echo "$((peak / 1024))" > "$output_file"

  # After wait returns we can get the exit code of $command
  wait $pid
@@ -43,16 +44,21 @@ monitor_memory_usage() {
}

monitor() {
+  local peak_mem_output start
  local command_name=$1
  shift
  local command=( "$@" )
-  local peak_mem_output=$(mktemp)
-  local start=$(nowms)
+
+  peak_mem_output=$(mktemp)
+  start=$(nowms)

  # execute the subcommand and save the peak memory usage
-  monitor_memory_usage $peak_mem_output "${command[@]}"
+  monitor_memory_usage "$peak_mem_output" "${command[@]}"

  mtime "exec.$command_name.time" "${start}"
-  mmeasure "exec.$command_name.memory" "$(cat $peak_mem_output)"
+  mmeasure "exec.$command_name.memory" "$(cat "$peak_mem_output")"
+  meta_time "$command_name-time" "$start"
+  meta_set "$command_name-memory" "$(cat "$peak_mem_output")"
}
+#!/usr/bin/env bash

# TODO: Merge these with the output helpers in buildpack-stdlib:
# https://github.com/heroku/buildpack-stdlib
@@ -26,6 +28,15 @@ header() {
  echo "-----> $*" || true
}

+bright_header() {
+  echo "" || true
+  echo -e "\033[1;33m-----> $* \033[0m"
+}
+
+header_skip_newline() {
+  echo "-----> $*" || true
+}

error() {
  echo " ! $*" >&2 || true
  echo "" || true
+#!/usr/bin/env bash

get_node_major_version() {
-  local node_version="$(node --version)"
+  local node_version
+  node_version="$(node --version)"
  # major_string will be ex: "6." "8." "10"
  local major_string=${node_version:1:2}
  # strip any "."s from major_string
  local major=${major_string//.}
-  echo $major
+  echo "$major"
}

install_plugin() {
+  local major
  local bp_dir="$1"
  local build_dir="$2"
-  local major=$(get_node_major_version)
+  major=$(get_node_major_version)
  local plugin="${bp_dir}/plugin/heroku-nodejs-plugin-node-${major}.tar.gz"
  # If we have a version of the plugin compiled for this version of node, and the
@@ -19,6 +23,6 @@ install_plugin() {
  # It will be included at runtime once the user opts into the Node metrics feature
  if [[ -f "${plugin}" ]] && [[ -z "$HEROKU_SKIP_NODE_PLUGIN" ]]; then
    mkdir -p "${build_dir}/.heroku/"
-    tar -xzf ${plugin} -C "${build_dir}/.heroku/"
+    tar -xzf "${plugin}" -C "${build_dir}/.heroku/"
  fi
}
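A hypothetical call site, assuming $BP_DIR and $BUILD_DIR as set up in bin/compile; the copy is skipped entirely when HEROKU_SKIP_NODE_PLUGIN is set:

```bash
# unpack the metrics plugin matching the installed node major version
install_plugin "$BP_DIR" "$BUILD_DIR"
```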
#!/usr/bin/env bash
uuid_fallback()
{
local N B C='89ab'
for (( N=0; N < 16; ++N ))
do
B=$(( RANDOM%256 ))
case $N in
6)
printf '4%x' $(( B%16 ))
;;
8)
printf '%c%x' ${C:$RANDOM%${#C}:1} $(( B%16 ))
;;
3 | 5 | 7 | 9)
printf '%02x-' $B
;;
*)
printf '%02x' $B
;;
esac
done
echo
}
uuid() {
# On Heroku's stacks, the kernel exposes a random uuid source
if [[ -f /proc/sys/kernel/random/uuid ]]; then
cat /proc/sys/kernel/random/uuid
# on macOS there is also a command
elif [[ -x "$(command -v uuidgen)" ]]; then
uuidgen | tr "[:upper:]" "[:lower:]"
# If you are running this buildpack on an image without either of the above binaries
# then let's provide something that approximates this functionality, but beware that
# we can make no guarantees of true randomness or uniqueness of this ID. However it is
# likely only being piped to /dev/null
#
# If that's not true for you, please file an issue and let us know:
# https://github.com/heroku/heroku-buildpack-nodejs/issues
else
uuid_fallback
fi
}
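In this commit the helper is consumed by the metadata module; a minimal sketch:

```bash
# generate a unique id for this build and persist it (see lib/build.sh above)
meta_set "build-uuid" "$(uuid)"

# or just print one, e.g. 1b4e28ba-2fa1-41d2-883f-0016d3cca427
uuid
```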
test: heroku-18 heroku-16 cedar-14
build:
@GOOS=darwin GOARCH=amd64 go build -ldflags="-s -w" -v -o ./vendor/resolve-version-darwin ./cmd/resolve-version
@GOOS=linux GOARCH=amd64 go build -ldflags="-s -w" -v -o ./vendor/resolve-version-linux ./cmd/resolve-version
build-production:
# build go binaries and then compress them
@GOOS=darwin GOARCH=amd64 go build -ldflags="-s -w" -v -o ./vendor/resolve-version-darwin ./cmd/resolve-version
@GOOS=linux GOARCH=amd64 go build -ldflags="-s -w" -v -o ./vendor/resolve-version-linux ./cmd/resolve-version
# https://blog.filippo.io/shrink-your-go-binaries-with-this-one-weird-trick/
upx --brute vendor/resolve-version-linux
upx --brute vendor/resolve-version-darwin
test-binary:
go test -v ./cmd/... -tags=integration
shellcheck:
@shellcheck -x bin/compile bin/detect bin/release bin/test bin/test-compile
@shellcheck -x lib/**
@shellcheck -x ci-profile/**
@shellcheck -x etc/**
heroku-18:
	@echo "Running tests in docker (heroku-18)..."
	@docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-18" heroku/heroku:18 bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;'
@@ -8,7 +8,7 @@ TAG_NAME=${1:-}
PLUGIN_DIR=$(dirname $0)

handle_failure() {
-  echo "Failure running script."
+  echo "Failure running script on line $1."
  echo "This may be rate-limiting from Github if you've run this script a few times. Here is the rate limit response:"
@@ -34,6 +34,8 @@ download() {

delete_old_plugin() {
  local dir=${1}
+  rm -f "$dir/heroku-nodejs-plugin-node-12.sha512"
+  rm -f "$dir/heroku-nodejs-plugin-node-12.tar.gz"
  rm -f "$dir/heroku-nodejs-plugin-node-11.sha512"
  rm -f "$dir/heroku-nodejs-plugin-node-11.tar.gz"
  rm -f "$dir/heroku-nodejs-plugin-node-10.sha512"
@@ -64,6 +66,10 @@ download_assets_for_release() {
  # Node 11
  download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-11-$tag.sha512" "$dir/heroku-nodejs-plugin-node-11.sha512"
  download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-11-$tag.tar.gz" "$dir/heroku-nodejs-plugin-node-11.tar.gz"
+  # Node 12
+  download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-12-$tag.sha512" "$dir/heroku-nodejs-plugin-node-12.sha512"
+  download "https://github.com/heroku/heroku-nodejs-plugin/releases/download/$tag/heroku-nodejs-plugin-node-12-$tag.tar.gz" "$dir/heroku-nodejs-plugin-node-12.tar.gz"
}

test_hash() {
@@ -81,7 +87,7 @@ test_hash() {
  fi
}

-trap 'handle_failure' ERR
+trap 'handle_failure ${LINENO}' ERR

if [[ -z $TAG_NAME ]]; then
  TAG_NAME=$(get_latest_release)
@@ -100,5 +106,6 @@ test_hash 8 $PLUGIN_DIR
test_hash 9 $PLUGIN_DIR
test_hash 10 $PLUGIN_DIR
test_hash 11 $PLUGIN_DIR
+test_hash 12 $PLUGIN_DIR

echo "Done"
-10e91fb8f741a42226fe91daf41a30032f6950f560622ce4523766d93dfd9d01dc88b3d5bfb26968d69d5f8fb6c61f0b35db310f61c366ae0c8d48c9181ee02c heroku-nodejs-plugin-node-10-v4.tar.gz
+d6ad0f45d5564f324147f590ce9ac39c5973a64dfb1905eb993dfcf3dce21b6934a7821ffbd853ff30e89952ba17e7667809152e187453d7c18579d945a5f8bd heroku-nodejs-plugin-node-10-v5.tar.gz
-6d0732e32f6cb2a818c415c5fc454912113da273c3b158124c630db473aa0b157fc81c41dd90de887a804e3c2c2eae0b9897d0da329b7a6e52b9cf27db4e5d0a heroku-nodejs-plugin-node-11-v4.tar.gz
+0afc36d4268b7ce3dd1c59813d7727a1bae715645bc0fb006ca992ccd028df692e31d2a4df697ab08d6b4baad03cd6ebef8525e481e0c5cf12e810c30e1da0cb heroku-nodejs-plugin-node-11-v5.tar.gz
+0020b60fd3aebcc1fb13770e3445a93c0633579853ee0d31d3dc12d02e4a8a1d418a4d4add2145da46f9d2820f6ae1e412a0beb4eb3e46d7fc8326d2e2d8248d heroku-nodejs-plugin-node-12-v5.tar.gz
-82a7f67bf61490d40aa69455a6a1fd7459f9fc219dfe9b0ac65cf7345e8a1c10070ce96ef42eecc5eb82fb5d7ea74b69e1a060c44f0a438eb02e81fd109c2ea4 heroku-nodejs-plugin-node-8-v4.tar.gz
+d96566824853bc7657fbf2f651067ed0b1747e4d6b4b9b443df6f2d490742b4e571dff5165e68c6d8af44501af0bdddd98e71eeb5fcc9816348453a1f8a314d9 heroku-nodejs-plugin-node-8-v5.tar.gz
-d7bca7b45d0852e082103041b7a5d1331470074223314273275c4cd8d09ef6174c14b270ede7f501e6280e935814535783a8d4050d9e21a8918b1ab81f444870 heroku-nodejs-plugin-node-9-v4.tar.gz
+eefbf22a508e0fd4dea303a7d247a4c6ebb60803c5221e43dd6a9921332ad32791f6b4e95a3379c519290a6767a1dc797f3e68ed583427a53695f47c7b80ccdd heroku-nodejs-plugin-node-9-v5.tar.gz
require_relative '../spec_helper'
describe "Node Metrics for v12.x" do
context "test metrics for Node v12.x app" do
let(:app) {
Hatchet::Runner.new(
"spec/fixtures/repos/node-12-metrics",
config: {
"HEROKU_METRICS_URL" => "http://localhost:3000",
"METRICS_INTERVAL_OVERRIDE" => "10000"
}
)
}
it "should deploy" do
app.deploy do |app|
data = successful_json_body(app)
expect(data["gauges"]["node.eventloop.delay.ms.max"]).to be >= 2000
expect(data["counters"]["node.gc.collections"]).to be >= 0
expect(data["counters"]["node.gc.young.collections"]).to be >= 0
expect(data["counters"]["node.gc.old.collections"]).to be >= 0
end
end
end
end
require_relative '../spec_helper'
describe "Hello World for Node v12.x" do
context "a single-process Node v12.x app" do
let(:app) {
Hatchet::Runner.new("spec/fixtures/repos/node-12")
}
it "should deploy successfully" do
app.deploy do |app|
expect(successful_body(app).strip).to eq("Hello, world!")
end
end
end
end
{
"name": "node-metrics-test-app",
"version": "1.0.0",
"engines": {
"node": "12.x"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {},
"dependencies": {}
}
#!/usr/bin/env node
const http = require('http');
const EventEmitter = require('events');
const PORT = process.env.PORT || 5000;
const Events = new EventEmitter();
// This will block the event loop for roughly the given length of time
function blockCpuFor(ms) {
return new Promise((resolve, reject) => {
setTimeout(() => {
console.log(`blocking the event loop for ${ms}ms`);
let now = new Date().getTime();
let result = 0
while(true) {
result += Math.random() * Math.random();
if (new Date().getTime() > now + ms)
break;
}
resolve();
}, 100);
});
}
function getNextMetricsEvent() {
return new Promise((resolve, reject) => Events.once('metrics', resolve));
}
const server = http.createServer((req, res) => {
// wait for the next metrics event
getNextMetricsEvent()
    // wrap each call in a closure so the blocking runs sequentially,
    // rather than starting immediately and passing a Promise to .then()
    .then(() => blockCpuFor(2000))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    .then(() => blockCpuFor(100))
    // gather the next metrics data which should include these pauses
    .then(getNextMetricsEvent)
.then(data => {
res.setHeader('Content-Type', 'application/json');
res.end(data);
})
.catch(() => {
res.statusCode = 500;
res.end("Something went wrong");
});
});
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
// Create a second server that intercepts the HTTP requests
// sent by the metrics plugin
const metricsListener = http.createServer((req, res) => {
if (req.method == 'POST') {
let body = '';
req.on('data', (data) => body += data);
req.on('end', () => Events.emit('metrics', body));
res.statusCode = 200;
res.end();
}
});
metricsListener.listen(3000, () => console.log('Listening for metrics on 3000'));
{
"name": "hello-world",
"version": "1.0.0",
"engines": {
"node": "12.x"
},
"scripts": {
"prettify": "prettier --single-quote --trailing-comma all --write 'bin/*' 'src/**/*.js'",
"test": "jest --silent",
"dev": "nodemon --watch . --watch src/* src/index.js",
"heroku-postbuild": "echo NODE_OPTIONS: $NODE_OPTIONS"
},
"main": "index.js",
"license": "MIT",
"devDependencies": {
"jest": "^19.0.2",
"nodemon": "^1.11.0",
"prettier": "^0.22.0"
},
"dependencies": {}
}
#!/usr/bin/env node
const http = require('http');
const crypto = require('crypto');
const PORT = process.env.PORT || 5000;
// This will block the event loop for roughly the given length of time
function blockCpuFor(ms) {
  var now = new Date().getTime();
  var result = 0;
  while(true) {
    result += Math.random() * Math.random();
    if (new Date().getTime() > now + ms)
      return;
  }
}
// block the event loop for 100ms every second
setInterval(() => {
blockCpuFor(100);
}, 1000)
// block the event loop for 1sec every 30 seconds
setInterval(() => {
blockCpuFor(1000);
}, 30000)
// Allocate and erase memory on an interval
let store = [];
setInterval(() => {
store.push(crypto.randomBytes(1000000).toString('hex'));
}, 500);
setInterval(() => {
store = [];
}, 60000);
const server = http.createServer((req, res) => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end("Hello, world!");
})
server.listen(PORT, () => console.log(`Listening on ${PORT}`));
@@ -7,7 +7,7 @@
  "url": "http://github.com/example/example.git"
},
"engines": {
-  "node": "~0.10.0"
+  "node": "10.x"
},
"scripts" : {
  "build": "echo build hook message",
@@ -6,11 +6,8 @@
  "type" : "git",
  "url" : "http://github.com/example/example.git"
},
-"engines": {
-  "node": "10.x"
-},
"scripts" : {
  "build" : "echo build hook message"
},
-"heroku-run-build-script": true
+"license": "MIT"
}
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
@@ -7,7 +7,7 @@
  "url" : "http://github.com/example/example.git"
},
"engines": {
-  "node": "~0.10.0"
+  "node": "10.x"
},
"scripts" : {
  "build" : "echo build hook message"
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"scripts" : {
"build" : "echo build hook message",
"heroku-postbuild": ""
}
}
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
A fake README, to keep npm from polluting stderr.
\ No newline at end of file
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository" : {
"type" : "git",
"url" : "http://github.com/example/example.git"
},
"engines": {
"node": "10.x"
},
"scripts" : {
"build" : "echo build hook message",
"heroku-postbuild": ""
}
}
@@ -10,9 +10,9 @@
     "hashish": "*"
   },
   "engines": {
-    "node": "0.10.38"
+    "node": "8.x"
   },
   "scripts": {
-    "postinstall": "exit 1"
+    "heroku-postbuild": "exit 1"
   }
 }
{
"name": "node-buildpack-test-app",
"version": "0.0.1",
"description": "node buildpack integration test app",
"repository": {
"type": "git",
"url": "http://github.com/example/example.git"
},
"engines": {
"node": "10.x"
},
"scripts": {
"build": "echo build hook message",
"heroku-postbuild": "",
"random-script-name": ""
}
}
@@ -7,7 +7,7 @@
     "url" : "http://github.com/example/example.git"
   },
   "engines": {
-    "node": "~0.10.0"
+    "node": "10.x"
   },
   "scripts" : {
     "heroku-prebuild" : "echo heroku-prebuild hook message",
...
@@ -16,27 +16,38 @@ testFlatmapStream() {
 }
 
 testBuildScriptBehavior() {
-  # opt in to new build script behavior
-  cache=$(mktmpdir)
-  env_dir=$(mktmpdir)
-  echo "true" > $env_dir/NEW_BUILD_SCRIPT_BEHAVIOR
-
   # The 'build' script is run by default
-  compile "build-script" $cache $env_dir
+  compile "build-script"
   assertCaptured "Running build"
   assertCapturedSuccess
 
   # the 'heroku-postbuild' script takes precedence over the 'build' script
-  compile "build-script-override" $cache $env_dir
-  assertCaptured "Detected both 'build' and 'heroku-postbuild' scripts"
+  compile "build-script-override"
+  assertCaptured "Detected both \"build\" and \"heroku-postbuild\" scripts"
   assertCaptured "Running heroku-postbuild"
   assertCapturedSuccess
 }
 
-testBuildScriptOptIn() {
-  compile "build-script-opt-in"
-  assertCaptured "Running build"
-  assertCaptured "Opting in to new default build script behavior"
+testBuildScriptYarn() {
+  compile "build-script-yarn"
+  assertCaptured "Running build (yarn)"
+  assertCaptured "build hook message"
+  assertCapturedSuccess
+}
+
+testPreferEmptyHerokuPostbuildOverBuild() {
+  compile "empty-heroku-postbuild"
+  assertCaptured "Detected both \"build\" and \"heroku-postbuild\" scripts"
+  assertCaptured "Running heroku-postbuild"
+  assertNotCaptured "build hook message"
+  assertCapturedSuccess
+}
+
+testEmptyHerokuPostbuildWithYarn() {
+  compile "empty-heroku-postbuild-yarn"
+  assertCaptured "Running heroku-postbuild (yarn)"
+  assertNotCaptured "build hook message"
+  assertNotCaptured "Script must exist"
   assertCapturedSuccess
 }
@@ -479,13 +490,6 @@ testInvalidNodeSemver() {
   assertCapturedError
 }
 
-testInvalidIo() {
-  compile "invalid-io"
-  assertCaptured "Resolving iojs version 2.0.99"
-  assertCaptured "Could not find Iojs version corresponding to version requirement: 2.0.99"
-  assertCapturedError
-}
-
 testSignatureInvalidation() {
   cache=$(mktmpdir)
   env_dir=$(mktmpdir)
@@ -527,10 +531,8 @@ testDetectWithoutPackageJson() {
 
 testIoJs() {
   compile "iojs"
-  assertCaptured "engines.iojs (package.json): 1.0."
-  assertCaptured "Downloading and installing iojs 1.0."
-  assertNotCaptured "Downloading and installing npm"
-  assertCapturedSuccess
+  assertCaptured "io.js no longer supported"
+  assertCapturedError
 }
 
 testSpecificVersion() {
@@ -562,11 +564,6 @@ testOldNpm() {
   assertCapturedError
 }
 
-testOldNpm2() {
-  compile "failing-build"
-  assertCaptured "This version of npm (1.4.28) has several known issues"
-}
-
 testNonexistentNpm() {
   compile "nonexistent-npm"
   assertCaptured "Unable to install npm 1.1.65"
@@ -594,7 +591,6 @@ testNpmVersionSpecific() {
 
 testFailingBuild() {
   compile "failing-build"
-  assertCaptured "Building dependencies"
   assertCaptured "Build failed"
   assertCaptured "We're sorry this build is failing"
   assertNotCaptured "Checking startup method"
@@ -874,6 +870,12 @@ testCIEnvVars() {
   assertCapturedSuccess
 }
 
+# If compile fails, test-compile should also fail
+testCICompileFails() {
+  testCompile "failing-build"
+  assertCapturedError
+}
+
 testCIEnvVarsOverride() {
   env_dir=$(mktmpdir)
   echo "banana" > $env_dir/NODE_ENV
@@ -1031,6 +1033,96 @@ testMemoryMetrics() {
   assertFileNotContains "measure#buildpack.nodejs.exec.heroku-postbuild.memory=" $metrics_log
 }
testBuildMetaData() {
local log_file=$(mktemp)
BUILDPACK_LOG_FILE="$log_file" compile "pre-post-build-scripts"
# build info
assertFileContains "node-package-manager=npm" $log_file
assertFileContains "checked-in-node-modules=false" $log_file
assertFileContains "has-node-lock-file=false" $log_file
assertFileContains "cache-status=not-found" $log_file
assertFileContains "node-build-success=true" $log_file
assertFileContains "build-time=" $log_file
assertFileContains "app-uuid=" $log_file
assertFileContains "build-uuid=" $log_file
# binary versions
assertFileContains "node-version-request=10.x" $log_file
assertFileContains "npm-version-request= " $log_file
# log build scripts
assertFileContains "heroku-prebuild-script=\"echo heroku-prebuild hook message\"" $log_file
assertFileContains "heroku-postbuild-script=\"echo heroku-postbuild hook message\"" $log_file
assertFileContains "build-script= " $log_file
# monitor calls
assertFileContains "install-node-binary-memory=" $log_file
assertFileContains "install-node-binary-time=" $log_file
assertFileContains "install-npm-binary-time=" $log_file
assertFileContains "install-npm-binary-memory=" $log_file
assertFileContains "heroku-prebuild-time=" $log_file
assertFileContains "heroku-prebuild-memory=" $log_file
assertFileContains "npm-install-time=" $log_file
assertFileContains "npm-install-memory=" $log_file
assertFileContains "heroku-postbuild-time=" $log_file
assertFileContains "heroku-postbuild-memory=" $log_file
assertFileContains "npm-prune-memory=" $log_file
assertFileContains "npm-prune-time=" $log_file
# erase the log file
echo "" > $log_file
BUILDPACK_LOG_FILE="$log_file" compile "yarn"
assertFileContains "node-package-manager=yarn" $log_file
assertFileContains "has-node-lock-file=true" $log_file
assertFileContains "yarn-version-request=1.x" $log_file
assertFileContains "yarn-version=1." $log_file
assertFileContains "install-yarn-binary-memory=" $log_file
assertFileContains "install-yarn-binary-time=" $log_file
assertFileContains "node-build-success=true" $log_file
# log resolve logic dark-launch
assertFileContains "resolve-matches-nodebin-yarn=true" $log_file
assertFileContains "resolve-matches-nodebin-node=true" $log_file
}
testFailingBuildMetaData() {
local log_file=$(mktemp)
BUILDPACK_LOG_FILE="$log_file" compile "bad-json"
assertFileContains "failure=invalid-package-json" $log_file
assertCapturedError
echo "" > $log_file
BUILDPACK_LOG_FILE="$log_file" compile "yarn-lockfile-out-of-date"
assertFileContains "failure=outdated-yarn-lockfile" $log_file
assertCapturedError
}
testPropagateAppUUID() {
env_dir=$(mktmpdir)
local log_file=$(mktemp)
local cache_dir=${2:-$(mktmpdir)}
echo "$log_file" > $env_dir/BUILDPACK_LOG_FILE
# save the generated app-uuid for the first build
compile "node-10" $cache_dir $env_dir
assertFileContains "app-uuid=" $log_file
local uuid=$(sed -n -e 's/^.*app-uuid=\([^ ]*\).*/\1/p' "$log_file")
# create a new log file
log_file=$(mktemp)
echo "$log_file" > $env_dir/BUILDPACK_LOG_FILE
# recompile with the same cache directory
compile "node-10" $cache_dir $env_dir
assertFileContains "app-uuid" $log_file
# make sure that the app-uuid is the same
assertEquals "$uuid" "$(cat $log_file | sed -n -e 's/^.*app-uuid=\([^ ]*\).*/\1/p')"
}
 testBinDetectWarnings() {
   detect "slugignore-package-json"
   assertCapturedError "'package.json' listed in '.slugignore' file"
@@ -1093,6 +1185,15 @@ compile() {
   capture ${bp_dir}/bin/compile ${compile_dir} ${2:-$(mktmpdir)} $3
 }
testCompile() {
default_process_types_cleanup
bp_dir=$(mktmpdir)
compile_dir=$(mktmpdir)
cp -a "$(pwd)"/* ${bp_dir}
cp -a ${bp_dir}/test/fixtures/$1/. ${compile_dir}
capture ${bp_dir}/bin/test-compile ${compile_dir} ${2:-$(mktmpdir)} $3
}
 # This is meant to be run after `compile`. `cleanupStartup` must be run
 # after this function is called before other tests are executed
 executeStartup() {
...
@@ -108,6 +108,26 @@ testKeyValue() {
   assertEquals "" "$(kv_list $store)"
 }
testKeyValueNoNewLine() {
local store
# use a fixture that does not have an empty line after the final entry
store="$(pwd)/test/unit-fixtures/kvstore/no-new-line"
assertEquals "$(printf "%s\n" a=b b=c)" "$(kv_list $store)"
assertEquals "$(printf "%s\n" a b)" "$(kv_keys $store)"
}
testKeyValueEmptyLine() {
local store
# use a fixture that has an extra empty line
store="$(pwd)/test/unit-fixtures/kvstore/empty-line"
assertEquals "$(printf "%s\n" a=b b=c)" "$(kv_list $store)"
assertEquals "$(printf "%s\n" a b)" "$(kv_keys $store)"
}
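The two fixtures above pin down edge cases in the key-value reader: a file with no trailing newline and a file with an extra blank line. A minimal sketch of a reader that satisfies both, assuming nothing about how lib/kvstore.sh actually implements it:

# Hypothetical reader: emit each key=value entry, tolerating a missing
# trailing newline and skipping blank lines
kv_list_sketch() {
  local file="$1" line
  while IFS= read -r line || [[ -n "$line" ]]; do
    [[ -n "$line" ]] && echo "$line"
  done < "$file"
}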
 testKeyValueEscaping() {
   local store=$(mktemp)
 
@@ -151,21 +171,71 @@ testKeyValueNoFile() {
 testBuildData() {
   local cache_dir=$(mktemp -d)
-  bd_create $cache_dir
-  bd_set "test" "foo"
-  assertEquals "test=foo" "$(log_build_data)"
+  meta_create $cache_dir
+  meta_set "test" "foo"
+  assertEquals "test=foo" "$(log_meta_data)"
 
-  bd_set "test" "different-foo"
-  assertEquals "test=different-foo" "$(log_build_data)"
+  meta_set "test" "different-foo"
+  assertEquals "test=different-foo" "$(log_meta_data)"
 
-  bd_set "foo" "value with spaces"
-  assertEquals "foo=\"value with spaces\" test=different-foo" "$(log_build_data)"
+  meta_set "foo" "value with spaces"
+  assertEquals "foo=\"value with spaces\" test=different-foo" "$(log_meta_data)"
 
   # values are printed with the keys sorted alphabetically
   # this isn't required, and this test serves as documentation
-  bd_set "a" "this should come first"
-  assertEquals "a=\"this should come first\" foo=\"value with spaces\" test=different-foo" "$(log_build_data)"
+  meta_set "a" "this should come first"
+  assertEquals "a=\"this should come first\" foo=\"value with spaces\" test=different-foo" "$(log_meta_data)"
# dates generated by running `nowms; sleep 10; nowms`
meta_time "time" "1545178120033" "1545178130043"
assertEquals "10.010" "$(meta_get time)"
# dates generated by running `nowms; sleep 1; nowms`
meta_time "time" "1545178503025" "1545178504027"
assertEquals "1.002" "$(meta_get time)"
# dates generated by running `nowms; sleep 30; nowms`
meta_time "time" "1545178521204" "1545178551206"
assertEquals "30.002" "$(meta_get time)"
}
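For reference, the arithmetic these assertions encode is just a millisecond delta rendered as seconds with three decimal places. A hypothetical helper (not the buildpack's actual meta_time) reproducing the expected values:

# Elapsed seconds between two millisecond timestamps, e.g.
# 1545178120033 .. 1545178130043 -> "10.010"
elapsed_seconds() {
  awk -v s="$1" -v e="$2" 'BEGIN { printf "%.3f", (e - s) / 1000 }'
}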
testBuildDataPreviousBuild() {
local cache_dir=$(mktemp -d)
# the first time, there will be no previous build file
meta_create "$cache_dir"
assertContains "nodejs" "$BUILD_DATA_FILE"
assertContains "nodejs-prev" "$PREVIOUS_BUILD_DATA_FILE"
assertFileExists "$BUILD_DATA_FILE"
# set a value in the build data file
meta_set "test" "foo"
assertFileContains "test=foo" "$BUILD_DATA_FILE"
assertFileDoesNotExist "$PREVIOUS_BUILD_DATA_FILE"
assertEquals "$(meta_get test)" "foo"
assertEquals "$(meta_prev_get test)" ""
# the second time this is called (cache restored)
# there will be a previous build file
meta_create "$cache_dir"
assertFileExists "$BUILD_DATA_FILE"
assertFileExists "$PREVIOUS_BUILD_DATA_FILE"
# the data stored in the previous build should now be in the second file
assertFileNotContains "test=foo" "$BUILD_DATA_FILE"
assertFileContains "test=foo" "$PREVIOUS_BUILD_DATA_FILE"
assertEquals "$(meta_get test)" ""
assertEquals "$(meta_prev_get test)" "foo"
meta_set "test" "bar"
# doing it once more does not result in an error
meta_create "$cache_dir"
assertFileExists "$BUILD_DATA_FILE"
assertFileExists "$PREVIOUS_BUILD_DATA_FILE"
assertEquals "$(meta_prev_get test)" "bar"
assertEquals "$(meta_get test)" ""
} }
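The assertions above imply a rotate-on-create scheme: each meta_create preserves the last build's data before starting a fresh file. A minimal sketch, with hypothetical file names under the cache directory (the real paths live in lib/metadata.sh):

# Hypothetical rotation: the previous build's data file is preserved
# before a fresh, empty one is started
meta_create_sketch() {
  local dir="$1/build-data"
  mkdir -p "$dir"
  BUILD_DATA_FILE="$dir/nodejs"
  PREVIOUS_BUILD_DATA_FILE="$dir/nodejs-prev"
  if [[ -f "$BUILD_DATA_FILE" ]]; then
    cp "$BUILD_DATA_FILE" "$PREVIOUS_BUILD_DATA_FILE"
  fi
  : > "$BUILD_DATA_FILE"
}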
 testWebConcurrencyProfileScript() {
 
@@ -210,15 +280,103 @@ testWebConcurrencyProfileScript() {
   assertEquals "1" "$(calculate_concurrency 512 1)"
 }
isUUID() {
if [[ ${1//-/} =~ ^[[:xdigit:]]{32}$ ]]; then
echo true
else
echo false
fi
}
testUUID() {
local first second
first=$(uuid)
second=$(uuid)
assertNotEquals "$first" "$second"
assertEquals "true" "$(isUUID "$first")"
assertEquals "true" "$(isUUID "$second")"
}
testUUIDFallback() {
local first second
first=$(uuid_fallback)
second=$(uuid_fallback)
assertNotEquals "$first" "$second"
assertEquals "true" "$(isUUID "$first")"
assertEquals "true" "$(isUUID "$second")"
}
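A sketch of what a urandom-based fallback can look like (hypothetical; lib/uuid.sh may do this differently), producing the 8-4-4-4-12 shape that isUUID above accepts:

# Hypothetical fallback: 16 random bytes, hex-encoded and dash-formatted
uuid_fallback_sketch() {
  od -An -N16 -tx1 /dev/urandom | tr -d ' \n' |
    sed -E 's/^(.{8})(.{4})(.{4})(.{4})(.{12})$/\1-\2-\3-\4-\5/'
}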
testHasScript() {
local file="$(pwd)/test/fixtures/has-script-fixtures/package.json"
assertEquals "true" "$(has_script "$file" "build")"
assertEquals "true" "$(has_script "$file" "heroku-postbuild")"
assertEquals "false" "$(has_script "$file" "postinstall")"
assertEquals "true" "$(has_script "$file" "random-script-name")"
}
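The contract being tested: a script counts as present if its key exists in package.json at all, even when its value is an empty string. A hypothetical jq-based version (the buildpack may implement this differently, e.g. without jq):

# Hypothetical has_script: "true" if .scripts contains the key at all
has_script_sketch() {
  if jq -e --arg s "$2" '.scripts | has($s)' "$1" > /dev/null 2>&1; then
    echo "true"
  else
    echo "false"
  fi
}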
testExperiments() {
local schema="$(pwd)/test/unit-fixtures/experiments/experiments-v1"
local schema_next="$(pwd)/test/unit-fixtures/experiments/experiments-v1-next"
local schema_v2="$(pwd)/test/unit-fixtures/experiments/experiments-v2"
local cache_dir=$(mktemp -d)
local val
experiments_init "nodejs" "$cache_dir" "$schema"
# these should always be the same
assertEquals "true" "$(experiments_get "all-on")"
assertEquals "false" "$(experiments_get "all-off")"
# this will change, but stay the same between runs
val="$(experiments_get "ab-test")"
# pretend this is the next time this build is run
experiments_init "nodejs" "$cache_dir" "$schema"
# these should always be the same
assertEquals "true" "$(experiments_get "all-on")"
assertEquals "false" "$(experiments_get "all-off")"
# val should be the same as it was before
assertEquals "$val" "$(experiments_get "ab-test")"
# now we add a new feature to the schema
experiments_init "nodejs" "$cache_dir" "$schema_next"
assertEquals "true" "$(experiments_get "all-on")"
assertEquals "false" "$(experiments_get "all-off")"
assertEquals "$val" "$(experiments_get "ab-test")"
assertEquals "true" "$(experiments_get "new-always-on")"
# reset the schema
experiments_init "nodejs" "$cache_dir" "$schema_v2"
assertNotNull "$(experiments_get "new-feature")"
assertNull "$(experiments_get "all-on")"
assertNull "$(experiments_get "all-off")"
assertNull "$(experiments_get "ab-test")"
assertNull "$(experiments_get "new-always-on")"
}
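Consistent with the schema fixtures near the end of this diff (values are rollout percentages), a hypothetical roll for a fractional experiment that stays stable across runs by persisting the first result:

# Hypothetical bucketing: 100 is always on, 0 always off, anything in
# between is rolled once and cached so later builds agree
experiment_roll_sketch() {
  local name="$1" percent="$2" state_dir="$3"
  local state="$state_dir/$name"
  if [[ ! -f "$state" ]]; then
    if (( RANDOM % 100 < percent )); then
      echo "true" > "$state"
    else
      echo "false" > "$state"
    fi
  fi
  cat "$state"
}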
+BP_DIR="$(pwd)"
+
 # mocks
 source "$(pwd)"/test/mocks/stdlib.sh
 
 # the modules to be tested
+source "$(pwd)"/lib/uuid.sh
+source "$(pwd)"/lib/environment.sh
 source "$(pwd)"/lib/json.sh
 source "$(pwd)"/lib/monitor.sh
 source "$(pwd)"/lib/output.sh
 source "$(pwd)"/lib/kvstore.sh
-source "$(pwd)"/lib/build-data.sh
+source "$(pwd)"/lib/experiments.sh
+source "$(pwd)"/lib/metadata.sh
 source "$(pwd)"/profile/WEB_CONCURRENCY.sh
 
+# testing utils
+source "$(pwd)"/test/utils
+
 # import the testing framework
 source "$(pwd)"/test/shunit2
#version=1
ab-test=50
all-on=100
all-off=0
#version=1
ab-test=50
all-on=100
all-off=0
new-always-on=100
a=b
b=c
\ No newline at end of file