Compare commits


8 Commits

Author SHA1 Message Date
Nick Fields  8e4fe2b191  patch: remove original multiline tests from ci workflow  2022-06-20 09:26:36 -04:00
Nick Fields  676683ec60  patch: move multiline tests to separate job  2022-06-20 09:25:15 -04:00
Anton Chaporgin  772a768088  tests checked with act  2022-06-20 14:04:06 +03:00
Anton Chaporgin  6e8635d64a  tests checked with act  2022-06-20 12:01:02 +03:00
Anton Chaporgin  2d8f74008e  self review  2022-06-20 11:28:31 +03:00
Anton Chaporgin  4d604b1776  tests  2022-06-20 11:25:59 +03:00
Nick Fields  29e1861bc1  Merge branch 'master' into patch-1  2022-06-19 22:06:19 -04:00
Anton Chaporgin  db59b2620d  add -e to bash to support multiline commands #43 #53  2022-06-09 10:33:36 +03:00
28 changed files with 921 additions and 11856 deletions
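
The functional change in this compare is the one named in the last commit above: multi-line `command` inputs are now run with `bash -e`, so a failing line makes the attempt exit non-zero (127 for a missing command) and be retried, instead of the failure being hidden by a later line that succeeds. A minimal sketch of a step exercising that behavior, mirroring the multiline tests added in the workflow below (the step name, id, and timeout are illustrative):

```yaml
- name: multi-line command fails fast under bash -e   # illustrative name
  id: multiline_sketch                                # illustrative id
  uses: ./                                            # the retry action, checked out locally
  continue-on-error: true
  with:
    shell: bash
    timeout_seconds: 30
    max_attempts: 2
    command: |
      i-do-not-exist
      echo "i-exist"
```

With `-e`, the first line's exit code 127 ends the attempt, `total_attempts` reaches 2, and `exit_error` never contains "i-exist"; without `-e`, the trailing `echo` would have masked the failure and the step would report success.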

View File

@@ -1 +0,0 @@
.eslintrc.js

View File

@@ -1,7 +0,0 @@
module.exports = {
root: true,
parser: '@typescript-eslint/parser',
plugins: ['@typescript-eslint'],
extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended', 'prettier'],
ignorePatterns: ['**/*.js', 'dist/'],
};

View File

@@ -1,2 +0,0 @@
dist/
node_modules/

View File

@@ -1,5 +0,0 @@
trailingComma: 'es5'
tabWidth: 2
semi: true
singleQuote: true
printWidth: 100

View File

@@ -1,13 +0,0 @@
module.exports = {
clearMocks: true,
moduleFileExtensions: ['js', 'ts'],
rootDir: '..',
testEnvironment: 'node',
testMatch: ['<rootDir>/src/**/*.test.ts'],
transform: {
'^.+\\.ts$': 'ts-jest',
},
verbose: true,
collectCoverage: true,
collectCoverageFrom: ['src/**/*.{js,ts,jsx,tsx}'],
};

View File

@@ -3,7 +3,8 @@ name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: nick-fields
assignees: nick-invision
---
**Describe the bug**
@@ -16,4 +17,4 @@ A clear and concise description of what you expected to happen.
If applicable, add screenshots to help explain your problem.
**Logs**
Enable [debug logging](https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging) then attach the [raw logs](https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/using-workflow-run-logs#downloading-logs) (specifically the raw output of this action).
Enable [debug logging](https://docs.github.com/en/free-pro-team@latest/actions/managing-workflow-runs/enabling-debug-logging#enabling-step-debug-logging) then attach the [raw logs](https://docs.github.com/en/free-pro-team@latest/actions/managing-workflow-runs/using-workflow-run-logs#downloading-logs) (specifically the raw output of this action).

12 .github/codecov.yml vendored
View File

@@ -1,12 +0,0 @@
# see https://docs.codecov.com/docs/codecovyml-reference
codecov:
require_ci_to_pass: false
comment:
layout: 'diff, flags'
behavior: default
require_changes: true
coverage:
# don't pass/fail PRs for coverage yet
status:
project: off
patch: off

View File

@@ -1,10 +0,0 @@
_Replace the bullet points below with your answers_
### Description
- What change is being made and why?
### Testing
- What tests were added?
- These can be either ["integration tests"](./workflows/ci_cd.yml) or unit tests

View File

@@ -1,36 +1,13 @@
name: CI/CD
on:
# only on PRs into and merge to default branch
pull_request:
branches:
- master
push:
branches:
- master
jobs:
ci_unit:
name: Run Unit Tests
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: 16
- name: Install dependencies
run: npm ci
- name: Run Unit Tests
run: npm test
- uses: codecov/codecov-action@v3
with:
directory: ./coverage/
verbose: true
ci_integration:
name: Run Integration Tests
runs-on: ubuntu-latest
# runs on branch pushes only
ci:
name: Run Tests
if: startsWith(github.ref, 'refs/heads')
runs-on: ubuntu-18.04
steps:
- name: Checkout
uses: actions/checkout@v2
@@ -59,6 +36,94 @@ jobs:
command: node ./.github/scripts/log-examples.js
timeout_minutes: 1
- name: sad-path (retry_wait_seconds)
id: sad_path_wait_sec
uses: ./
continue-on-error: true
with:
timeout_minutes: 1
max_attempts: 3
retry_wait_seconds: 15
command: npm install this-isnt-a-real-package-name-zzz
- uses: nick-invision/assert-action@v1
with:
expected: 3
actual: ${{ steps.sad_path_wait_sec.outputs.total_attempts }}
- uses: nick-invision/assert-action@v1
with:
expected: failure
actual: ${{ steps.sad_path_wait_sec.outcome }}
- uses: nick-invision/assert-action@v1
with:
expected: 'Final attempt failed'
actual: ${{ steps.sad_path_wait_sec.outputs.exit_error }}
comparison: contains
- name: new-command-on-retry
id: new-command-on-retry
uses: ./
with:
timeout_minutes: 1
max_attempts: 3
command: node -e "process.exit(1)"
new_command_on_retry: node -e "console.log('this is the new command on retry')"
- name: on-retry-cmd
id: on-retry-cmd
uses: ./
continue-on-error: true
with:
timeout_minutes: 1
max_attempts: 3
command: node -e "process.exit(1)"
on_retry_command: node -e "console.log('this is a retry command')"
- name: retry_on_exit_code (with expected error code)
id: retry_on_exit_code_expected
uses: ./
continue-on-error: true
with:
timeout_minutes: 1
retry_on_exit_code: 2
max_attempts: 3
command: node -e "process.exit(2)"
- uses: nick-invision/assert-action@v1
with:
expected: failure
actual: ${{ steps.retry_on_exit_code_expected.outcome }}
- uses: nick-invision/assert-action@v1
with:
expected: 3
actual: ${{ steps.retry_on_exit_code_expected.outputs.total_attempts }}
- name: retry_on_exit_code (with unexpected error code)
id: retry_on_exit_code_unexpected
uses: ./
continue-on-error: true
with:
timeout_minutes: 1
retry_on_exit_code: 2
max_attempts: 3
command: node -e "process.exit(1)"
- uses: nick-invision/assert-action@v1
with:
expected: failure
actual: ${{ steps.retry_on_exit_code_unexpected.outcome }}
- uses: nick-invision/assert-action@v1
with:
expected: 1
actual: ${{ steps.retry_on_exit_code_unexpected.outputs.total_attempts }}
- name: on-retry-cmd (on-retry fails)
id: on-retry-cmd-fails
uses: ./
continue-on-error: true
with:
timeout_minutes: 1
max_attempts: 3
command: node -e "process.exit(1)"
on_retry_command: node -e "throw new Error('This is an on-retry command error')"
- name: sad-path (error)
id: sad_path_error
uses: ./
@@ -76,6 +141,41 @@ jobs:
expected: failure
actual: ${{ steps.sad_path_error.outcome }}
- name: happy-path (continue_on_error)
id: happy_path_continue_on_error
uses: ./
with:
command: node -e "process.exit(0)"
timeout_minutes: 1
continue_on_error: true
- name: sad-path (continue_on_error)
id: sad_path_continue_on_error
uses: ./
with:
command: node -e "process.exit(33)"
timeout_minutes: 1
continue_on_error: true
- name: Verify continue_on_error returns correct exit code on success
uses: nick-invision/assert-action@v1
with:
expected: 0
actual: ${{ steps.happy_path_continue_on_error.outputs.exit_code }}
- name: Verify continue_on_error exits with correct outcome on success
uses: nick-invision/assert-action@v1
with:
expected: success
actual: ${{ steps.happy_path_continue_on_error.outcome }}
- name: Verify continue_on_error returns correct exit code on error
uses: nick-invision/assert-action@v1
with:
expected: 33
actual: ${{ steps.sad_path_continue_on_error.outputs.exit_code }}
- name: Verify continue_on_error exits with successful outcome when an error occurs
uses: nick-invision/assert-action@v1
with:
expected: success
actual: ${{ steps.sad_path_continue_on_error.outcome }}
- name: retry_on (timeout) fails early if error encountered
id: retry_on_timeout_fail
uses: ./
@@ -120,263 +220,7 @@ jobs:
expected: 2
actual: ${{ steps.retry_on_error.outputs.exit_code }}
- name: sad-path (wrong shell for OS)
id: wrong_shell
uses: ./
continue-on-error: true
with:
timeout_minutes: 1
max_attempts: 2
shell: cmd
command: 'dir'
- uses: nick-invision/assert-action@v1
with:
expected: 2
actual: ${{ steps.wrong_shell.outputs.total_attempts }}
- uses: nick-invision/assert-action@v1
with:
expected: failure
actual: ${{ steps.wrong_shell.outcome }}
ci_integration_envvar:
name: Run Integration Env Var Tests
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: 16
- name: Install dependencies
run: npm ci
- name: env-vars-passed-through
uses: ./
env:
NODE_OPTIONS: '--max_old_space_size=3072'
with:
timeout_minutes: 1
max_attempts: 2
command: node -e 'console.log(process.env.NODE_OPTIONS)'
ci_integration_large_output:
name: Run Integration Large Output Tests
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: 16
- name: Install dependencies
run: npm ci
- name: Test 100MiB of output can be processed
id: large-output
continue-on-error: true
uses: ./
with:
max_attempts: 1
timeout_minutes: 5
command: 'make -C ./test-data/large-output bytes-102400'
- name: Assert test had expected result
uses: nick-invision/assert-action@v1
with:
expected: failure
actual: ${{ steps.large-output.outcome }}
- name: Assert exit code is expected
uses: nick-invision/assert-action@v1
with:
expected: 2
actual: ${{ steps.large-output.outputs.exit_code }}
ci_integration_retry_on_exit_code:
name: Run Integration retry_on_exit_code Tests
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: 16
- name: Install dependencies
run: npm ci
- name: retry_on_exit_code (with expected error code)
id: retry_on_exit_code_expected
uses: ./
continue-on-error: true
with:
timeout_minutes: 1
retry_on_exit_code: 2
max_attempts: 3
command: node -e "process.exit(2)"
- uses: nick-invision/assert-action@v1
with:
expected: failure
actual: ${{ steps.retry_on_exit_code_expected.outcome }}
- uses: nick-invision/assert-action@v1
with:
expected: 3
actual: ${{ steps.retry_on_exit_code_expected.outputs.total_attempts }}
- name: retry_on_exit_code (with unexpected error code)
id: retry_on_exit_code_unexpected
uses: ./
continue-on-error: true
with:
timeout_minutes: 1
retry_on_exit_code: 2
max_attempts: 3
command: node -e "process.exit(1)"
- uses: nick-invision/assert-action@v1
with:
expected: failure
actual: ${{ steps.retry_on_exit_code_unexpected.outcome }}
- uses: nick-invision/assert-action@v1
with:
expected: 1
actual: ${{ steps.retry_on_exit_code_unexpected.outputs.total_attempts }}
ci_integration_continue_on_error:
name: Run Integration continue_on_error Tests
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: 16
- name: Install dependencies
run: npm ci
- name: happy-path (continue_on_error)
id: happy_path_continue_on_error
uses: ./
with:
command: node -e "process.exit(0)"
timeout_minutes: 1
continue_on_error: true
- name: sad-path (continue_on_error)
id: sad_path_continue_on_error
uses: ./
with:
command: node -e "process.exit(33)"
timeout_minutes: 1
continue_on_error: true
- name: Verify continue_on_error returns correct exit code on success
uses: nick-invision/assert-action@v1
with:
expected: 0
actual: ${{ steps.happy_path_continue_on_error.outputs.exit_code }}
- name: Verify continue_on_error exits with correct outcome on success
uses: nick-invision/assert-action@v1
with:
expected: success
actual: ${{ steps.happy_path_continue_on_error.outcome }}
- name: Verify continue_on_error returns correct exit code on error
uses: nick-invision/assert-action@v1
with:
expected: 33
actual: ${{ steps.sad_path_continue_on_error.outputs.exit_code }}
- name: Verify continue_on_error exits with successful outcome when an error occurs
uses: nick-invision/assert-action@v1
with:
expected: success
actual: ${{ steps.sad_path_continue_on_error.outcome }}
ci_integration_retry_wait_seconds:
name: Run Integration Tests (retry_wait_seconds)
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: 16
- name: Install dependencies
run: npm ci
- name: sad-path (retry_wait_seconds)
id: sad_path_wait_sec
uses: ./
continue-on-error: true
with:
timeout_minutes: 1
max_attempts: 3
retry_wait_seconds: 15
command: npm install this-isnt-a-real-package-name-zzz
- uses: nick-invision/assert-action@v1
with:
expected: 3
actual: ${{ steps.sad_path_wait_sec.outputs.total_attempts }}
- uses: nick-invision/assert-action@v1
with:
expected: failure
actual: ${{ steps.sad_path_wait_sec.outcome }}
- uses: nick-invision/assert-action@v1
with:
expected: 'Final attempt failed'
actual: ${{ steps.sad_path_wait_sec.outputs.exit_error }}
comparison: contains
ci_integration_on_retry_cmd:
name: Run Integration Tests (on_retry_command)
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: 16
- name: Install dependencies
run: npm ci
- name: new-command-on-retry
id: new-command-on-retry
uses: ./
with:
timeout_minutes: 1
max_attempts: 3
command: node -e "process.exit(1)"
new_command_on_retry: node -e "console.log('this is the new command on retry')"
- name: on-retry-cmd
id: on-retry-cmd
uses: ./
continue-on-error: true
with:
timeout_minutes: 1
max_attempts: 3
command: node -e "process.exit(1)"
on_retry_command: node -e "console.log('this is a retry command')"
- name: on-retry-cmd (on-retry fails)
id: on-retry-cmd-fails
uses: ./
continue-on-error: true
with:
timeout_minutes: 1
max_attempts: 3
command: node -e "process.exit(1)"
on_retry_command: node -e "throw new Error('This is an on-retry command error')"
# timeout tests take longer to run so run in parallel
ci_integration_timeout_seconds:
name: Run Integration Timeout Tests (seconds)
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: 16
- name: Install dependencies
run: npm ci
# timeout tests (takes longer to run so run last)
- name: sad-path (timeout)
id: sad_path_timeout
uses: ./
@@ -394,19 +238,6 @@ jobs:
expected: failure
actual: ${{ steps.sad_path_timeout.outcome }}
ci_integration_timeout_retry_on_timeout:
name: Run Integration Timeout Tests (retry_on timeout)
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: 16
- name: Install dependencies
run: npm ci
- name: retry_on (timeout)
id: retry_on_timeout
uses: ./
@@ -425,19 +256,6 @@ jobs:
expected: failure
actual: ${{ steps.retry_on_timeout.outcome }}
ci_integration_timeout_retry_on_error:
name: Run Integration Timeout Tests (retry_on error)
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: 16
- name: Install dependencies
run: npm ci
- name: retry_on (error) fails early if timeout encountered
id: retry_on_error_fail
uses: ./
@@ -460,19 +278,6 @@ jobs:
expected: 1
actual: ${{ steps.retry_on_error_fail.outputs.exit_code }}
ci_integration_timeout_minutes:
name: Run Integration Timeout Tests (minutes)
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: 16
- name: Install dependencies
run: npm ci
- name: sad-path (timeout minutes)
id: sad_path_timeout_minutes
uses: ./
@@ -484,14 +289,141 @@ jobs:
- uses: nick-invision/assert-action@v1
with:
expected: 2
actual: ${{ steps.sad_path_timeout_minutes.outputs.total_attempts }}
actual: ${{ steps.sad_path_timeout.outputs.total_attempts }}
- uses: nick-invision/assert-action@v1
with:
expected: failure
actual: ${{ steps.sad_path_timeout_minutes.outcome }}
actual: ${{ steps.sad_path_timeout.outcome }}
- name: sad-path (wrong shell for OS)
id: wrong_shell
uses: ./
continue-on-error: true
with:
timeout_minutes: 1
max_attempts: 2
shell: cmd
command: 'dir'
- uses: nick-invision/assert-action@v1
with:
expected: 2
actual: ${{ steps.wrong_shell.outputs.total_attempts }}
- uses: nick-invision/assert-action@v1
with:
expected: failure
actual: ${{ steps.wrong_shell.outcome }}
ci_multiline_tests:
name: Run Tests (Multiline)
if: startsWith(github.ref, 'refs/heads')
runs-on: ubuntu-18.04
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: 16
- name: Install dependencies
run: npm ci
- name: Multi-line 2 commands non existent first command
id: multi_line_2_commands_non_existent_first_command
uses: ./
continue-on-error: true
with:
shell: bash
timeout_seconds: 1
max_attempts: 2
command: |
i-do-not-exist && \
echo "i-exist"
- uses: nick-invision/assert-action@v1
with:
expected: 2
actual: ${{ steps.multi_line_2_commands_non_existent_first_command.outputs.total_attempts }}
- uses: nick-invision/assert-action@v1
with:
expected: 'Final attempt failed'
actual: ${{ steps.multi_line_2_commands_non_existent_first_command.outputs.exit_error }}
comparison: contains
- uses: nick-invision/assert-action@v1
with:
# The 127 error code indicates “command not found”.
expected: '127'
actual: ${{ steps.multi_line_2_commands_non_existent_first_command.outputs.exit_code }}
comparison: contains
- uses: nick-invision/assert-action@v1
with:
expected: 'i-exist'
actual: ${{ steps.multi_line_2_commands_non_existent_first_command.outputs.exit_error }}
comparison: notContains
- name: Multi-line 2 commands happy path test
id: multi_line_2_commands_happy_path
uses: ./
with:
shell: bash
timeout_seconds: 1
max_attempts: 2
command: |
echo "foo" && \
echo "bar"
- uses: nick-invision/assert-action@v1
with:
expected: 1
actual: ${{ steps.multi_line_2_commands_happy_path.outputs.total_attempts }}
- name: Conventional multi-line non existent first command
id: conventional_multi_line_non_existent_first_command
uses: ./
continue-on-error: true
with:
shell: bash
timeout_seconds: 1
max_attempts: 2
command: |
i-do-not-exist
echo "i-exist"
- uses: nick-invision/assert-action@v1
with:
expected: 2
actual: ${{ steps.conventional_multi_line_non_existent_first_command.outputs.total_attempts }}
- uses: nick-invision/assert-action@v1
with:
expected: 'Final attempt failed'
actual: ${{ steps.conventional_multi_line_non_existent_first_command.outputs.exit_error }}
comparison: contains
- uses: nick-invision/assert-action@v1
with:
# The 127 error code indicates “command not found”.
expected: '127'
actual: ${{ steps.conventional_multi_line_non_existent_first_command.outputs.exit_code }}
comparison: contains
- uses: nick-invision/assert-action@v1
with:
expected: 'i-exist'
actual: ${{ steps.conventional_multi_line_non_existent_first_command.outputs.exit_error }}
comparison: notContains
- name: Conventional multi-line happy path test
id: conventional_multi_line_happy_path
uses: ./
with:
shell: bash
timeout_seconds: 1
max_attempts: 2
command: |
echo "foo"
echo "bar"
- uses: nick-invision/assert-action@v1
with:
expected: 1
actual: ${{ steps.conventional_multi_line_happy_path.outputs.total_attempts }}
ci_windows:
name: Run Windows Tests
if: startsWith(github.ref, 'refs/heads')
runs-on: windows-latest
steps:
- name: Checkout
@@ -541,34 +473,12 @@ jobs:
echo "this is
a test"
ci_all_tests_passed:
name: All tests passed
needs:
[
ci_unit,
ci_integration,
ci_integration_envvar,
ci_integration_large_output,
ci_integration_on_retry_cmd,
ci_integration_retry_wait_seconds,
ci_integration_continue_on_error,
ci_integration_retry_on_exit_code,
ci_integration_timeout_seconds,
ci_integration_timeout_minutes,
ci_integration_timeout_retry_on_timeout,
ci_integration_timeout_retry_on_error,
ci_windows,
]
runs-on: ubuntu-latest
steps:
- run: echo "If this is hit, all tests successfully passed"
# runs on merge to default only
# runs on push to master only
cd:
name: Publish Action
needs: [ci_all_tests_passed]
needs: ci
if: github.ref == 'refs/heads/master'
runs-on: ubuntu-latest
runs-on: ubuntu-18.04
steps:
- name: Checkout
uses: actions/checkout@v2
@@ -580,7 +490,7 @@ jobs:
run: npm ci
- name: Release
id: semantic
uses: cycjimmy/semantic-release-action@v4
uses: cycjimmy/semantic-release-action@v2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Tag

View File

@@ -1,5 +0,0 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
# lint commit message
npx --no -- commitlint --config ./.config/.commitlintrc.js --edit $1

View File

@@ -1,8 +0,0 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
# run lint/styling on staged changes
npx lint-staged
# regenerate dist
npm run prepare && git add .

2 .nvmrc
View File

@@ -1 +1 @@
v16.16.0
v16.14.2

7 .prettierrc.js Normal file
View File

@@ -0,0 +1,7 @@
module.exports = {
tabWidth: 2,
printWidth: 100,
semi: true,
singleQuote: true,
trailingComma: 'es5',
};

View File

@@ -1,3 +0,0 @@
{
"recommendations": ["esbenp.prettier-vscode"]
}

View File

@@ -1,7 +1,6 @@
{
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"prettier.configPath": "./.config/.prettierrc.yml",
"prettier.ignorePath": "./.config/.prettierignore",
"typescript.tsdk": "node_modules/typescript/lib"
"prettier.requireConfig": true,
"typescript.tsdk": "node_modules/typescript/lib",
"editor.tabSize": 2
}

View File

@@ -30,7 +30,7 @@ Retries an Action step on failure or timeout. This is currently intended to repl
### `shell`
**Optional** Shell to use to execute `command`. Defaults to `powershell` on Windows, `bash` otherwise. Supports bash, python, pwsh, sh, cmd, and powershell per [docs](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsshell)
**Optional** Shell to use to execute `command`. Defaults to `powershell` on Windows, `bash` otherwise. Supports bash, python, pwsh, sh, cmd, and powershell per [docs](https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions#using-a-specific-shell)
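
For illustration, a minimal sketch of overriding the default shell (the values are placeholders; the chosen shell must be available on the runner OS, and either `timeout_minutes` or `timeout_seconds` is required):

```yaml
- uses: nick-fields/retry@v2
  with:
    timeout_minutes: 5
    max_attempts: 3
    shell: pwsh             # any of the supported shells listed above
    command: Get-ChildItem  # placeholder command for the chosen shell
```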
### `polling_interval_seconds`
@@ -156,7 +156,7 @@ with:
```yaml
- uses: nick-fields/retry@v2
id: retry
# see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idcontinue-on-error
# see https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions#jobsjob_idcontinue-on-error
continue-on-error: true
with:
timeout_seconds: 15

View File

@@ -50,5 +50,5 @@ outputs:
exit_error:
description: The final error returned by the command
runs:
using: 'node16'
using: 'node12'
main: 'dist/index.js'

1131 dist/index.js vendored

File diff suppressed because it is too large

10541 package-lock.json generated

File diff suppressed because it is too large

View File

@@ -3,13 +3,8 @@
"version": "0.0.0-managed-by-semantic-release",
"description": "Retries a GitHub Action step on failure or timeout.",
"scripts": {
"lint:base": "eslint --config ./.config/.eslintrc.js ",
"lint": "npm run lint:base -- .",
"local": "npm run prepare && node -r dotenv/config ./dist/index.js",
"prepare": "ncc build src/index.ts && husky install",
"style:base": "prettier --config ./.config/.prettierrc.yml --ignore-path ./.config/.prettierignore --write ",
"style": "npm run style:base -- .",
"test": "jest -c ./.config/jest.config.js"
"prepare": "ncc build src/index.ts"
},
"repository": {
"type": "git",
@@ -23,7 +18,7 @@
},
"homepage": "https://github.com/nick-invision/retry#readme",
"dependencies": {
"@actions/core": "^1.10.1",
"@actions/core": "^1.8.2",
"milliseconds": "^1.0.3",
"tree-kill": "^1.2.2"
},
@@ -32,35 +27,19 @@
"@commitlint/config-conventional": "^16.2.1",
"@semantic-release/changelog": "^6.0.1",
"@semantic-release/git": "^10.0.1",
"@types/jest": "^28.1.6",
"@types/milliseconds": "0.0.30",
"@types/node": "^16.11.7",
"@typescript-eslint/eslint-plugin": "^5.32.0",
"@typescript-eslint/parser": "^5.32.0",
"@types/node": "14.14.7",
"@zeit/ncc": "^0.20.5",
"dotenv": "8.2.0",
"eslint": "^8.21.0",
"eslint-config-prettier": "^8.5.0",
"husky": "^8.0.1",
"jest": "^28.1.3",
"lint-staged": "^13.0.3",
"prettier": "^2.7.1",
"husky": "^4.3.8",
"semantic-release": "19.0.3",
"ts-jest": "^28.0.7",
"ts-node": "9.0.0",
"typescript": "^4.7.4",
"yaml-lint": "^1.7.0"
"typescript": "4.0.5"
},
"lint-staged": {
"**/*.ts": [
"npm run style:base --",
"npm run lint:base --"
],
"**/*.{md,yaml,yml}": [
"npm run style:base --"
],
"**/*.{yaml,yml}": [
"npx yamllint "
]
"husky": {
"hooks": {
"commit-msg": "commitlint -E HUSKY_GIT_PARAMS",
"pre-commit": "npm run prepare && git add ."
}
}
}

View File

@@ -1,11 +1,7 @@
# these are the bare minimum envvars required
INPUT_TIMEOUT_MINUTES=1
INPUT_MAX_ATTEMPTS=3
INPUT_COMMAND="node -e 'process.exit(99)'"
INPUT_CONTINUE_ON_ERROR=false
# these are optional
#INPUT_RETRY_WAIT_SECONDS=10
#SHELL=pwsh
#INPUT_POLLING_INTERVAL_SECONDS=1
#INPUT_RETRY_ON=any
INPUT_RETRY_WAIT_SECONDS=10
SHELL=pwsh
INPUT_POLLING_INTERVAL_SECONDS=1
INPUT_RETRY_ON=any
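
For context, these `INPUT_*` variables use the same names GitHub Actions assigns when it passes step inputs to the runtime (which is how `getInput` reads them), so the bare-minimum local configuration above corresponds roughly to a workflow step like this sketch (the step itself is not part of this diff):

```yaml
- uses: nick-fields/retry@v2
  with:
    timeout_minutes: 1
    max_attempts: 3
    command: node -e 'process.exit(99)'
    continue_on_error: false
```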

View File

@@ -1,85 +1,144 @@
import { error, warning, info, debug, setOutput } from '@actions/core';
import { execSync, spawn } from 'child_process';
import { getInput, error, warning, info, debug, setOutput } from '@actions/core';
import { exec, execSync } from 'child_process';
import ms from 'milliseconds';
import kill from 'tree-kill';
import { getInputs, getTimeout, Inputs, validateInputs } from './inputs';
import { retryWait, wait } from './util';
import { wait } from './util';
// inputs
const TIMEOUT_MINUTES = getInputNumber('timeout_minutes', false);
const TIMEOUT_SECONDS = getInputNumber('timeout_seconds', false);
const MAX_ATTEMPTS = getInputNumber('max_attempts', true) || 3;
const COMMAND = getInput('command', { required: true });
const RETRY_WAIT_SECONDS = getInputNumber('retry_wait_seconds', false) || 10;
const SHELL = getInput('shell');
const POLLING_INTERVAL_SECONDS = getInputNumber('polling_interval_seconds', false) || 1;
const RETRY_ON = getInput('retry_on') || 'any';
const WARNING_ON_RETRY = getInput('warning_on_retry').toLowerCase() === 'true';
const ON_RETRY_COMMAND = getInput('on_retry_command');
const CONTINUE_ON_ERROR = getInputBoolean('continue_on_error');
const NEW_COMMAND_ON_RETRY = getInput('new_command_on_retry');
const RETRY_ON_EXIT_CODE = getInputNumber('retry_on_exit_code', false);
const OS = process.platform;
const OUTPUT_TOTAL_ATTEMPTS_KEY = 'total_attempts';
const OUTPUT_EXIT_CODE_KEY = 'exit_code';
const OUTPUT_EXIT_ERROR_KEY = 'exit_error';
let exit: number;
let done: boolean;
var exit: number;
var done: boolean;
function getExecutable(inputs: Inputs): string {
if (!inputs.shell) {
return OS === 'win32' ? 'powershell' : 'bash';
function getInputNumber(id: string, required: boolean): number | undefined {
const input = getInput(id, { required });
const num = Number.parseInt(input);
// empty is ok
if (!input && !required) {
return;
}
if (!Number.isInteger(num)) {
throw `Input ${id} only accepts numbers. Received ${input}`;
}
return num;
}
function getInputBoolean(id: string): Boolean {
const input = getInput(id);
if (!['true','false'].includes(input.toLowerCase())) {
throw `Input ${id} only accepts boolean values. Received ${input}`;
}
return input.toLowerCase() === 'true'
}
async function retryWait() {
const waitStart = Date.now();
await wait(ms.seconds(RETRY_WAIT_SECONDS));
debug(`Waited ${Date.now() - waitStart}ms`);
debug(`Configured wait: ${ms.seconds(RETRY_WAIT_SECONDS)}ms`);
}
async function validateInputs() {
if ((!TIMEOUT_MINUTES && !TIMEOUT_SECONDS) || (TIMEOUT_MINUTES && TIMEOUT_SECONDS)) {
throw new Error('Must specify either timeout_minutes or timeout_seconds inputs');
}
}
function getTimeout(): number {
if (TIMEOUT_MINUTES) {
return ms.minutes(TIMEOUT_MINUTES);
} else if (TIMEOUT_SECONDS) {
return ms.seconds(TIMEOUT_SECONDS);
}
throw new Error('Must specify either timeout_minutes or timeout_seconds inputs');
}
function getExecutable(): string {
if (!SHELL) {
return OS === 'win32' ? 'powershell' : 'bash -e';
}
let executable: string;
const shellName = inputs.shell.split(' ')[0];
switch (shellName) {
case 'bash':
case 'python':
case 'pwsh': {
executable = inputs.shell;
switch (SHELL) {
case "bash": {
// -e to not ignore errors, but exit with non-zero code.
executable = "bash -e";
break;
}
case 'sh': {
case "python":
case "pwsh": {
executable = SHELL;
break;
}
case "sh": {
if (OS === 'win32') {
throw new Error(`Shell ${shellName} not allowed on OS ${OS}`);
throw new Error(`Shell ${SHELL} not allowed on OS ${OS}`);
}
executable = inputs.shell;
executable = SHELL;
break;
}
case 'cmd':
case 'powershell': {
case "cmd":
case "powershell": {
if (OS !== 'win32') {
throw new Error(`Shell ${shellName} not allowed on OS ${OS}`);
throw new Error(`Shell ${SHELL} not allowed on OS ${OS}`);
}
executable = shellName + '.exe' + inputs.shell.replace(shellName, '');
executable = SHELL + ".exe";
break;
}
default: {
throw new Error(
`Shell ${shellName} not supported. See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsshell for supported shells`
);
throw new Error(`Shell ${SHELL} not supported. See https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions#using-a-specific-shell for supported shells`);
}
}
return executable;
return executable
}
async function runRetryCmd(inputs: Inputs): Promise<void> {
async function runRetryCmd(): Promise<void> {
// if no retry script, just continue
if (!inputs.on_retry_command) {
if (!ON_RETRY_COMMAND) {
return;
}
try {
await execSync(inputs.on_retry_command, { stdio: 'inherit' });
// eslint-disable-next-line
} catch (error: any) {
info(`WARNING: Retry command threw the error ${error.message}`);
await execSync(ON_RETRY_COMMAND, { stdio: 'inherit' });
} catch (error) {
info(`WARNING: Retry command threw the error ${error.message}`)
}
}
async function runCmd(attempt: number, inputs: Inputs) {
const end_time = Date.now() + getTimeout(inputs);
const executable = getExecutable(inputs);
async function runCmd(attempt: number) {
const end_time = Date.now() + getTimeout();
const executable = getExecutable();
exit = 0;
done = false;
let timeout = false;
debug(`Running command ${inputs.command} on ${OS} using shell ${executable}`);
const child =
attempt > 1 && inputs.new_command_on_retry
? spawn(inputs.new_command_on_retry, { shell: executable })
: spawn(inputs.command, { shell: executable });
debug(`Running command ${COMMAND} on ${OS} using shell "${executable}"`)
var child = attempt > 1 && NEW_COMMAND_ON_RETRY
? exec(NEW_COMMAND_ON_RETRY, { 'shell': executable })
: exec(COMMAND, { 'shell': executable });
child.stdout?.on('data', (data) => {
process.stdout.write(data);
@@ -91,66 +150,56 @@ async function runCmd(attempt: number, inputs: Inputs) {
child.on('exit', (code, signal) => {
debug(`Code: ${code}`);
debug(`Signal: ${signal}`);
if (code && code > 0) {
exit = code;
}
// timeouts are killed manually
if (signal === 'SIGTERM') {
return;
}
// On Windows signal is null.
if (timeout) {
return;
}
if (code && code > 0) {
exit = code;
}
done = true;
});
do {
await wait(ms.seconds(inputs.polling_interval_seconds));
await wait(ms.seconds(POLLING_INTERVAL_SECONDS));
} while (Date.now() < end_time && !done);
if (!done && child.pid) {
timeout = true;
if (!done) {
kill(child.pid);
await retryWait(ms.seconds(inputs.retry_wait_seconds));
throw new Error(`Timeout of ${getTimeout(inputs)}ms hit`);
await retryWait();
throw new Error(`Timeout of ${getTimeout()}ms hit`);
} else if (exit > 0) {
await retryWait(ms.seconds(inputs.retry_wait_seconds));
await retryWait();
throw new Error(`Child_process exited with error code ${exit}`);
} else {
return;
}
}
async function runAction(inputs: Inputs) {
await validateInputs(inputs);
async function runAction() {
await validateInputs();
for (let attempt = 1; attempt <= inputs.max_attempts; attempt++) {
for (let attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) {
try {
// just keep overwriting attempts output
setOutput(OUTPUT_TOTAL_ATTEMPTS_KEY, attempt);
await runCmd(attempt, inputs);
await runCmd(attempt);
info(`Command completed after ${attempt} attempt(s).`);
break;
// eslint-disable-next-line
} catch (error: any) {
if (attempt === inputs.max_attempts) {
} catch (error) {
if (attempt === MAX_ATTEMPTS) {
throw new Error(`Final attempt failed. ${error.message}`);
} else if (!done && inputs.retry_on === 'error') {
} else if (!done && RETRY_ON === 'error') {
// error: timeout
throw error;
} else if (inputs.retry_on_exit_code && inputs.retry_on_exit_code !== exit) {
} else if (RETRY_ON_EXIT_CODE && RETRY_ON_EXIT_CODE !== exit){
throw error;
} else if (exit > 0 && inputs.retry_on === 'timeout') {
} else if (exit > 0 && RETRY_ON === 'timeout') {
// error: error
throw error;
} else {
await runRetryCmd(inputs);
if (inputs.warning_on_retry) {
await runRetryCmd();
if (WARNING_ON_RETRY) {
warning(`Attempt ${attempt} failed. Reason: ${error.message}`);
} else {
info(`Attempt ${attempt} failed. Reason: ${error.message}`);
@@ -160,9 +209,7 @@ async function runAction(inputs: Inputs) {
}
}
const inputs = getInputs();
runAction(inputs)
runAction()
.then(() => {
setOutput(OUTPUT_EXIT_CODE_KEY, 0);
process.exit(0); // success
@@ -171,7 +218,7 @@ runAction(inputs)
// exact error code if available, otherwise just 1
const exitCode = exit > 0 ? exit : 1;
if (inputs.continue_on_error) {
if (CONTINUE_ON_ERROR) {
warning(err.message);
} else {
error(err.message);
@@ -183,5 +230,5 @@ runAction(inputs)
// if continue_on_error, exit with exact error code else exit gracefully
// mimics native continue-on-error that is not supported in composite actions
process.exit(inputs.continue_on_error ? 0 : exitCode);
process.exit(CONTINUE_ON_ERROR ? 0 : exitCode);
});

View File

@@ -1,94 +0,0 @@
import { getInput } from '@actions/core';
import ms from 'milliseconds';
export interface Inputs {
timeout_minutes: number | undefined;
timeout_seconds: number | undefined;
max_attempts: number;
command: string;
retry_wait_seconds: number;
shell: string | undefined;
polling_interval_seconds: number;
retry_on: string | undefined;
warning_on_retry: boolean;
on_retry_command: string | undefined;
continue_on_error: boolean;
new_command_on_retry: string | undefined;
retry_on_exit_code: number | undefined;
}
export function getInputNumber(id: string, required: boolean): number | undefined {
const input = getInput(id, { required });
const num = Number.parseInt(input);
// empty is ok
if (!input && !required) {
return;
}
if (!Number.isInteger(num)) {
throw `Input ${id} only accepts numbers. Received ${input}`;
}
return num;
}
export function getInputBoolean(id: string): boolean {
const input = getInput(id);
if (!['true', 'false'].includes(input.toLowerCase())) {
throw `Input ${id} only accepts boolean values. Received ${input}`;
}
return input.toLowerCase() === 'true';
}
export async function validateInputs(inputs: Inputs) {
if (
(!inputs.timeout_minutes && !inputs.timeout_seconds) ||
(inputs.timeout_minutes && inputs.timeout_seconds)
) {
throw new Error('Must specify either timeout_minutes or timeout_seconds inputs');
}
}
export function getTimeout(inputs: Inputs): number {
if (inputs.timeout_minutes) {
return ms.minutes(inputs.timeout_minutes);
} else if (inputs.timeout_seconds) {
return ms.seconds(inputs.timeout_seconds);
}
throw new Error('Must specify either timeout_minutes or timeout_seconds inputs');
}
export function getInputs(): Inputs {
const timeout_minutes = getInputNumber('timeout_minutes', false);
const timeout_seconds = getInputNumber('timeout_seconds', false);
const max_attempts = getInputNumber('max_attempts', true) || 3;
const command = getInput('command', { required: true });
const retry_wait_seconds = getInputNumber('retry_wait_seconds', false) || 10;
const shell = getInput('shell');
const polling_interval_seconds = getInputNumber('polling_interval_seconds', false) || 1;
const retry_on = getInput('retry_on') || 'any';
const warning_on_retry = getInput('warning_on_retry').toLowerCase() === 'true';
const on_retry_command = getInput('on_retry_command');
const continue_on_error = getInputBoolean('continue_on_error');
const new_command_on_retry = getInput('new_command_on_retry');
const retry_on_exit_code = getInputNumber('retry_on_exit_code', false);
return {
timeout_minutes,
timeout_seconds,
max_attempts,
command,
retry_wait_seconds,
shell,
polling_interval_seconds,
retry_on,
warning_on_retry,
on_retry_command,
continue_on_error,
new_command_on_retry,
retry_on_exit_code,
};
}

View File

@@ -1,17 +0,0 @@
import 'jest';
import { getHeapStatistics } from 'v8';
import { wait } from './util';
// mocks the setTimeout function, see https://jestjs.io/docs/timer-mocks
jest.useFakeTimers();
jest.spyOn(global, 'setTimeout');
describe('util', () => {
test('wait', async () => {
const waitTime = 1000;
wait(waitTime);
expect(setTimeout).toHaveBeenCalledTimes(1);
expect(setTimeout).toHaveBeenLastCalledWith(expect.any(Function), waitTime);
});
});

View File

@@ -1,12 +1,3 @@
import { debug } from '@actions/core';
export async function wait(ms: number) {
return new Promise((r) => setTimeout(r, ms));
}
export async function retryWait(retryWaitSeconds: number) {
const waitStart = Date.now();
await wait(retryWaitSeconds);
debug(`Waited ${Date.now() - waitStart}ms`);
debug(`Configured wait: ${retryWaitSeconds}ms`);
}

View File

@@ -1,13 +0,0 @@
SHELL = bash
# this tests the fix for the following issues
# https://github.com/nick-fields/retry/issues/76
# https://github.com/nick-fields/retry/issues/84
bytes-%:
for i in {1..$*}; do cat kibibyte.txt; done; exit 2
.PHONY: bytes-%
lines-%:
for i in {1..$*}; do echo a; done; exit 2
.PHONY: lines-%

View File

@@ -1,13 +0,0 @@
1: 0000 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
2: 0081 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
3: 0162 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
4: 243 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
5: 324 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
6: 405 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
7: 486 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
8: 567 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
9: 648 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
a: 729 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
b: 810 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
c: 891 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
d: 972 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa