build: inline external Github Actions to unblock CI (#12241)

* build: inline cached-dependencies to unblock CI

* Run E2E on pull_request only

* Inline all external actions

* Checkout needed for internal actions

Also fixes pre-commit

* Add missing files
Jesse Yang 2021-01-04 04:16:07 -08:00 committed by GitHub
parent 7cc0de1694
commit a3bbbf8ea3
173 changed files with 48871 additions and 78 deletions


@ -0,0 +1 @@
indent_size = 2


@ -0,0 +1,3 @@
dist/
lib/
node_modules/


@ -0,0 +1,26 @@
module.exports = {
plugins: ['jest', '@typescript-eslint'],
extends: ['plugin:jest/all'],
parser: '@typescript-eslint/parser',
parserOptions: {
ecmaVersion: 9,
sourceType: 'module',
},
rules: {
'eslint-comments/no-use': 'off',
'import/no-namespace': 'off',
'no-unused-vars': 'off',
'no-console': 'off',
'jest/prefer-expect-assertions': 'off',
'jest/no-disabled-tests': 'warn',
'jest/no-focused-tests': 'error',
'jest/no-identical-title': 'error',
'jest/prefer-to-have-length': 'warn',
'jest/valid-expect': 'error',
},
env: {
node: true,
es6: true,
'jest/globals': true,
},
};


@ -0,0 +1,34 @@
name: Tests
on:
pull_request:
paths-ignore:
- '**.md'
push:
branches:
- master
paths-ignore:
- '**.md'
jobs:
test:
strategy:
matrix:
os: [ubuntu-latest, macOS-latest]
name: Test on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: '12.x'
- name: Install dependencies
run: npm ci
- name: Run prettier format check
run: npm run format-check
- name: Build
run: npm run build
- name: Run tests
run: npm run test
- name: Upload code coverage
run: |
bash <(curl -s https://codecov.io/bash)


@ -0,0 +1,6 @@
lib
coverage
node_modules
!dist
!dist/cache


@ -0,0 +1,3 @@
dist/
lib/
node_modules/


@ -0,0 +1,11 @@
{
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"semi": true,
"singleQuote": true,
"trailingComma": "all",
"bracketSpacing": true,
"arrowParens": "avoid",
"parser": "typescript"
}


@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2018 GitHub, Inc. and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.


@ -0,0 +1,212 @@
# cached-dependencies
[![](https://github.com/ktmud/cached-dependencies/workflows/Tests/badge.svg)](https://github.com/ktmud/cached-dependencies/actions?query=workflow%3ATests) [![codecov](https://codecov.io/gh/ktmud/cached-dependencies/branch/master/graph/badge.svg)](https://codecov.io/gh/ktmud/cached-dependencies)
Enable **multi-layer cache** and **shortcut commands** in any workflows.
Manage multiple cache targets in one step. Use either the built-in cache configs for npm, yarn, and pip, or write your own. Create a bash command library to easily reduce redundancies across workflows. Most useful for building web apps that require multi-stage build processes.
This is your all-in-one action for everything related to setting up dependencies with caching.
## Inputs
- **run**: bash commands to run, allows shortcut commands
- **caches**: path to a JS module that defines cache targets, defaults to `.github/workflows/caches.js`
- **bashlib**: path to a bash script that defines shortcut commands, defaults to `.github/workflows/bashlib.sh`
- **parallel**: whether to run the commands in parallel with node subprocesses
## Examples
The following workflow sets up dependencies for a typical Python web app, with both the `pip` and `npm` caches configured in one simple step:
```yaml
jobs:
build_and_test:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Install dependencies
uses: ktmud/cached-dependencies@v1
with:
run: |
npm-install
npm run build
pip-install
python ./bin/manager.py fill_test_data
```
Here we use the predefined `npm-install` and `pip-install` commands to install dependencies with their corresponding caches.
You may also replace `npm-install` with `yarn-install` to install npm packages with `yarn.lock`.
```yaml
- name: Install dependencies
uses: ktmud/cached-dependencies@v1
with:
run: |
yarn-install
yarn build
pip-install
python ./bin/manager.py fill_test_data
```
See below for more details.
## Usage
### Cache configs
Under the hood, we use [@actions/cache](https://github.com/marketplace/actions/cache) to manage cache storage. But instead of defining one cache at a time and specifying it in workflow YAML files, you manage all caches in a separate JS file: `.github/workflows/caches.js`.
Here is [the default configuration](https://github.com/ktmud/cached-dependencies/blob/master/src/cache/caches.ts) for Linux:
```js
module.exports = {
pip: {
path: [`${process.env.HOME}/.cache/pip`],
hashFiles: ['requirements*.txt'],
keyPrefix: 'pip-',
restoreKeys: 'pip-',
},
npm: {
path: [`${process.env.HOME}/.npm`],
hashFiles: [
`package-lock.json`,
`*/*/package-lock.json`,
`!node_modules/*/package-lock.json`,
],
},
yarn: {
path: [`${process.env.HOME}/.npm`],
// */* is for supporting lerna monorepo with depth=2
hashFiles: [`yarn.lock`, `*/*/yarn.lock`, `!node_modules/*/yarn.lock`],
},
}
```
Here, `hashFiles` and `keyPrefix` are used to compute the primary cache key passed to [@actions/cache](https://github.com/marketplace/actions/cache). `keyPrefix` defaults to `${cacheName}-`, and `restoreKeys` defaults to `keyPrefix` if not specified.
It is recommended to always use absolute paths in these configs so you can share them across different workflows more easily (in case the action is called from different working directories).
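As a sketch (the `cypress` target name and its paths below are purely illustrative), a custom `caches.js` that adds an extra cache target could look like this:
```js
// .github/workflows/caches.js -- hypothetical extra cache target
module.exports = {
  cypress: {
    path: [`${process.env.HOME}/.cache/Cypress`],
    hashFiles: [`${process.env.GITHUB_WORKSPACE}/cypress/package-lock.json`],
    // `keyPrefix` is omitted, so it defaults to `cypress-`;
    // `restoreKeys` is omitted, so it defaults to `keyPrefix`
  },
};
```
Targets defined here are merged with, and take precedence over, the built-in `pip`, `npm`, and `yarn` configs.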
#### Specify when to restore and save
With the predefined `cache-restore` and `cache-save` bash commands, you have full flexibility over when to restore and save caches:
```yaml
steps:
- uses: actions/checkout@v2
- uses: ktmud/cached-dependencies@v1
with:
run: |
cache-restore npm
npm install
cache-save npm
cache-restore pip
pip install -r requirements.txt
cache-save pip
```
### Shortcut commands
All predefined shortcut commands can be found [here](https://github.com/ktmud/cached-dependencies/blob/master/src/scripts/bashlib.sh). You can also customize them or add new ones in `.github/workflows/bashlib.sh`.
For example, if you want to install additional packages before saving the `pip` cache, simply add this to the `bashlib.sh` file:
```bash
# override the default `pip-install` command
pip-install() {
cd $GITHUB_WORKSPACE
cache-restore pip
echo "::group::pip install"
pip install -r requirements.txt # prod requirements
pip install -r requirements-dev.txt # dev requirements
pip install -e ".[postgres,mysql]" # current pacakge with some extras
echo "::endgroup::"
cache-save pip
}
```
### Default setup command
When `run` is not provided:
```yaml
jobs:
name: Build
steps:
- name: Install dependencies
uses: ktmud/cached-dependencies@v1
```
You must provide a `default-setup-command` in the bashlib. For example,
```bash
default-setup-command() {
pip-install & npm-install
}
```
This will start installing pip and npm dependencies at the same time.
### Customize config locations
Both config files, `.github/workflows/bashlib.sh` and `.github/workflows/caches.js`, can be placed in other locations:
```yaml
- uses: ktmud/cached-dependencies@v1
with:
caches: ${{ github.workspace }}/.github/configs/caches.js
bashlib: ${{ github.workspace }}/.github/configs/bashlib.sh
```
### Run commands in parallel
When `parallel` is set to `true`, the `run` input will be split into an array of commands and passed to `Promise.all(...)` to execute in parallel. For example,
```yaml
- uses: ktmud/cached-dependencies@v1
with:
parallel: true
run: |
pip-install
npm-install
```
is equivalent to
```yaml
- uses: ktmud/cached-dependencies@v1
with:
run: |
pip-install & npm-install
```
If one or more of your commands must span multiple lines, separate the parallel groups with an empty line. The commands within each group will still run sequentially.
```yaml
- uses: ktmud/cached-dependencies@v1
with:
run: |
cache-restore pip
pip install requirements*.txt
# additional pip packages
pip install package1 package2 package3
cache-save pip
npm-install
cache-restore cypress
cd cypress/ && npm install
cache-save cypress
```
## License
This project is released under [the MIT License](LICENSE).


@ -0,0 +1,124 @@
import path from 'path';
import * as fs from 'fs';
import * as os from 'os';
import * as core from '@actions/core';
import * as cache from '../src/cache';
import * as inputsUtils from '../src/utils/inputs';
import * as actionUtils from '@actions/cache/src/utils/actionUtils';
import defaultCaches from '../src/cache/caches';
import { setInputs, getInput, maybeArrayToString } from '../src/utils/inputs';
import { Inputs, InputName, GitHubEvent, EnvVariable } from '../src/constants';
import caches, { npmExpectedHash } from './fixtures/caches';
describe('patch core states', () => {
it('should log error if states file invalid', () => {
const logWarningMock = jest.spyOn(actionUtils, 'logWarning');
fs.writeFileSync(`${os.tmpdir()}/cached--states.json`, 'INVALID_JSON', {
encoding: 'utf-8',
});
core.getState('haha');
expect(logWarningMock).toHaveBeenCalledTimes(2);
});
it('should persist state', () => {
core.saveState('test', '100');
expect(core.getState('test')).toStrictEqual('100');
});
});
describe('cache runner', () => {
it('should use default cache config', async () => {
await cache.loadCustomCacheConfigs();
// but `npm` actually comes from `src/cache/caches.ts`
const inputs = await cache.getCacheInputs('npm');
expect(inputs?.[InputName.Path]).toStrictEqual(
maybeArrayToString(defaultCaches.npm.path),
);
expect(inputs?.[InputName.RestoreKeys]).toStrictEqual('npm-');
});
it('should override cache config', async () => {
setInputs({
[InputName.Caches]: path.resolve(__dirname, 'fixtures/caches'),
});
await cache.loadCustomCacheConfigs();
const inputs = await cache.getCacheInputs('npm');
expect(inputs?.[InputName.Path]).toStrictEqual(
maybeArrayToString(caches.npm.path),
);
expect(inputs?.[InputName.Key]).toStrictEqual(`npm-${npmExpectedHash}`);
expect(inputs?.[InputName.RestoreKeys]).toStrictEqual(
maybeArrayToString(caches.npm.restoreKeys),
);
});
it('should apply inputs and restore cache', async () => {
setInputs({
[InputName.Caches]: path.resolve(__dirname, 'fixtures/caches'),
[EnvVariable.GitHubEventName]: GitHubEvent.PullRequest,
});
const setInputsMock = jest.spyOn(inputsUtils, 'setInputs');
const inputs = await cache.getCacheInputs('npm');
const result = await cache.run('restore', 'npm');
expect(result).toBeUndefined();
// before run
expect(setInputsMock).toHaveBeenNthCalledWith(1, inputs);
// after run
expect(setInputsMock).toHaveBeenNthCalledWith(2, {
[InputName.Key]: '',
[InputName.Path]: '',
[InputName.RestoreKeys]: '',
});
// inputs actually restored to original value
expect(getInput(InputName.Key)).toStrictEqual('');
// pretend still in execution context
setInputs(inputs as Inputs);
// `core.getState` should return the primary key
expect(core.getState('CACHE_KEY')).toStrictEqual(inputs?.[InputName.Key]);
setInputsMock.mockRestore();
});
it('should run saveCache', async () => {
// call to save should also work
const logWarningMock = jest.spyOn(actionUtils, 'logWarning');
setInputs({
[InputName.Parallel]: 'true',
});
await cache.run('save', 'npm');
expect(logWarningMock).toHaveBeenCalledWith(
'Cache Service Url not found, unable to restore cache.',
);
});
it('should exit on invalid args', async () => {
// other calls do generate errors
const processExitMock = jest
.spyOn(process, 'exit')
// @ts-ignore
.mockImplementation(() => {});
// incomplete arguments
await cache.run();
await cache.run('save');
// bad arguments
await cache.run('save', 'unknown-cache');
await cache.run('unknown-action', 'unknown-cache');
setInputs({
[InputName.Caches]: 'non-existent',
});
await cache.run('save', 'npm');
expect(processExitMock).toHaveBeenCalledTimes(5);
});
});


@ -0,0 +1,5 @@
#!/bin/bash
default-setup-command() {
print-cachescript-path
}


@ -0,0 +1,14 @@
/**
* Example cache config.
*/
export const npmHashFiles = ['.*ignore'];
export const npmExpectedHash =
'13ed29a1c7ec906e7dcb20626957ebfcd3f0f2174bd2685a012105792bf1ff55';
export default {
npm: {
path: [`~/.npm`],
hashFiles: npmHashFiles,
restoreKeys: 'node-npm-',
},
};


@ -0,0 +1,101 @@
/**
* Test default runner.
*/
import { setInputs } from '../src/utils/inputs';
import { InputName, DefaultInputs } from '../src/constants';
import * as setup from '../src/setup';
import path from 'path';
const extraBashlib = path.resolve(__dirname, './fixtures/bashlib.sh');
describe('setup runner', () => {
// don't actually run the bash script
const runCommandMock = jest.spyOn(setup, 'runCommand');
it('should allow custom bashlib', async () => {
setInputs({
[InputName.Bashlib]: extraBashlib,
});
await setup.run();
expect(runCommandMock).toHaveBeenCalledTimes(1);
expect(runCommandMock).toHaveBeenCalledWith(
DefaultInputs[InputName.Run],
extraBashlib,
);
});
it('should allow inline bash overrides', async () => {
const processExitMock = jest
.spyOn(process, 'exit')
// @ts-ignore
.mockImplementation(() => {});
setInputs({
[InputName.Bashlib]: '',
[InputName.Parallel]: 'false',
[InputName.Run]: `
${DefaultInputs[InputName.Run]}() {
echo "It works!"
exit 202
}
${DefaultInputs[InputName.Run]}
`,
});
// allow the bash script to run for one test, but override the default
await setup.run();
expect(runCommandMock).toHaveBeenCalledTimes(1);
expect(processExitMock).toHaveBeenCalledTimes(1);
expect(processExitMock).toHaveBeenCalledWith(1);
});
it('should use run commands', async () => {
// don't run the commands when there are no overrides
runCommandMock.mockImplementation(async () => {});
setInputs({
[InputName.Bashlib]: 'non-existent',
[InputName.Run]: 'print-cachescript-path',
});
await setup.run();
expect(runCommandMock).toHaveBeenCalledTimes(1);
expect(runCommandMock).toHaveBeenCalledWith('print-cachescript-path', '');
});
it('should handle single-new-line parallel commands', async () => {
setInputs({
[InputName.Run]: `
test-command-1
test-command-2
`,
[InputName.Parallel]: 'true',
});
await setup.run();
expect(runCommandMock).toHaveBeenNthCalledWith(1, 'test-command-1', '');
expect(runCommandMock).toHaveBeenNthCalledWith(2, 'test-command-2', '');
});
it('should handle multi-new-line parallel commands', async () => {
setInputs({
[InputName.Run]: `
test-1-1
test-1-2
test-2
`,
[InputName.Parallel]: 'true',
});
await setup.run();
expect(runCommandMock).toHaveBeenNthCalledWith(
1,
'test-1-1\n test-1-2',
'',
);
expect(runCommandMock).toHaveBeenNthCalledWith(2, 'test-2', '');
});
});


@ -0,0 +1,10 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"baseUrl": "./",
"outDir": "../build",
"noEmit": true,
"rootDir": "../"
},
"exclude": ["node_modules"]
}


@ -0,0 +1,25 @@
name: Cached Dependencies
description: Set up multi-layered caches and dependencies in one step; share predefined commands across workflows
author: Jesse Yang <hello@yjc.me>
branding:
icon: layers
color: yellow
inputs:
caches:
required: false
description: Path to a JS file with cache configs
default: ${{ github.workspace }}/.github/workflows/caches.js
bashlib:
required: false
description: Path to a Bash script with command shortcuts
default: ${{ github.workspace }}/.github/workflows/bashlib.sh
run:
required: false
description: Setup commands to run, can use shortcuts defined in bashlib
default: default-setup-command
parallel:
required: false
description: Whether to run commands in parallel
runs:
using: node12
main: dist/index.js

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -0,0 +1,57 @@
'use strict';
const fs = require('fs');
const crypto = require('crypto');
const {parentPort} = require('worker_threads');
const handlers = {
hashFile: (algorithm, filePath) => new Promise((resolve, reject) => {
const hasher = crypto.createHash(algorithm);
fs.createReadStream(filePath)
// TODO: Use `Stream.pipeline` when targeting Node.js 12.
.on('error', reject)
.pipe(hasher)
.on('error', reject)
.on('finish', () => {
const {buffer} = hasher.read();
resolve({value: buffer, transferList: [buffer]});
});
}),
hash: async (algorithm, input) => {
const hasher = crypto.createHash(algorithm);
if (Array.isArray(input)) {
for (const part of input) {
hasher.update(part);
}
} else {
hasher.update(input);
}
const hash = hasher.digest().buffer;
return {value: hash, transferList: [hash]};
}
};
parentPort.on('message', async message => {
try {
const {method, args} = message;
const handler = handlers[method];
if (handler === undefined) {
throw new Error(`Unknown method '${method}'`);
}
const {value, transferList} = await handler(...args);
parentPort.postMessage({id: message.id, value}, transferList);
} catch (error) {
const newError = {message: error.message, stack: error.stack};
for (const [key, value] of Object.entries(error)) {
if (typeof value !== 'object') {
newError[key] = value;
}
}
parentPort.postMessage({id: message.id, error: newError});
}
});


@ -0,0 +1,21 @@
module.exports = {
clearMocks: true,
moduleFileExtensions: ['js', 'ts'],
testEnvironment: 'node',
testMatch: ['**/*.test.ts'],
transform: {
'^.+\\.ts$': 'ts-jest',
},
transformIgnorePatterns: [
'/node_modules/(?!@actions).+\\.js$',
],
verbose: true,
};
// suppress debug messages
const processStdoutWrite = process.stdout.write.bind(process.stdout);
process.stdout.write = (str, encoding, cb) => {
processStdoutWrite(str.split('\n').filter(x => {
return !/^::debug::/.test(x);
}).join('\n'), encoding, cb);
};

.github/actions/cached-dependencies/package-lock.json (8197 lines, generated, vendored)

File diff suppressed because it is too large


@ -0,0 +1,47 @@
{
"name": "setup-superset-action",
"version": "1.0.0",
"private": true,
"keywords": [
"actions",
"node",
"setup",
"superset"
],
"main": "dist/run",
"scripts": {
"all": "npm run format && npm run lint && npm run test && npm run build",
"build": "npm run clean && tsc && ncc build -o dist src/run.ts && ncc build -o dist/scripts/cache src/scripts/cache.ts",
"clean": "rm -rf ./lib ./dist",
"coverage": "npm run test && open ./coverage/lcov-report/index.html",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"lint": "eslint src/**/*.ts",
"test": "jest --clearCache && jest --coverage"
},
"dependencies": {
"@actions/cache": "actions/cache#d29c1df198dd38ac88e0ae23a2881b99c2d20e68",
"@actions/core": "1.2.4",
"@actions/exec": "1.0.4",
"@actions/glob": "0.1.0",
"@types/uuid": "7.0.4",
"hasha": "5.2.0",
"tempy": "0.6.0",
"uuid": "7.0.3"
},
"devDependencies": {
"@types/jest": "26.0.7",
"@types/node": "12.12.53",
"@typescript-eslint/eslint-plugin": "3.7.1",
"@typescript-eslint/parser": "3.7.1",
"@zeit/ncc": "0.22.3",
"eslint": "7.5.0",
"eslint-plugin-jest": "23.19.0",
"jest": "26.1.0",
"js-yaml": "3.14.0",
"prettier": "2.0.5",
"prettier-plugin-packagejson": "2.2.5",
"ts-jest": "26.1.4",
"typescript": "3.9.7"
}
}


@ -0,0 +1,5 @@
{
"extends": [
"config:base"
]
}


@ -0,0 +1,49 @@
/**
* Default cache configs
*/
import * as os from 'os';
export interface CacheConfig {
path: string[] | string;
hashFiles: string[] | string;
keyPrefix?: string;
restoreKeys?: string[] | string;
}
export interface CacheConfigs {
[cacheName: string]: CacheConfig;
}
const { HOME = '~' } = process.env;
const platform = os.platform() as 'linux' | 'darwin' | 'win32';
const pathByPlatform = {
linux: {
pip: `${HOME}/.cache/pip`,
},
darwin: {
pip: `${HOME}/Library/Caches/pip`,
},
win32: {
pip: `${HOME}\\AppData\\Local\\pip\\Cache`,
},
};
export default {
pip: {
path: pathByPlatform[platform].pip,
hashFiles: 'requirements*.txt',
},
npm: {
path: `${HOME}/.npm`,
hashFiles: [
`package-lock.json`,
// support lerna monorepo with depth=2
`*/*/package-lock.json`,
`!node_modules/*/package-lock.json`,
],
},
yarn: {
path: `${HOME}/.npm`,
hashFiles: [`yarn.lock`, `*/*/yarn.lock`, `!node_modules/*/yarn.lock`],
},
} as CacheConfigs;


@ -0,0 +1,146 @@
/**
* Execute @actions/cache with predefined cache configs.
*/
import { beginImport, doneImport } from './patch'; // monkey patch @actions modules
beginImport();
import saveCache from '@actions/cache/src/save';
import restoreCache from '@actions/cache/src/restore';
doneImport();
import hasha from 'hasha';
import * as fs from 'fs';
import * as core from '@actions/core';
import * as glob from '@actions/glob';
import { Inputs, InputName, DefaultInputs } from '../constants';
import { applyInputs, getInput, maybeArrayToString } from '../utils/inputs';
import caches from './caches'; // default cache configs
// GitHub uses `sha256` for the built-in `${{ hashFiles(...) }}` expression
// https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#hashfiles
const HASH_OPTION = { algorithm: 'sha256' };
/**
* Load custom cache configs from the `caches` path defined in inputs.
*
* @returns Whether the loading is successful.
*/
export async function loadCustomCacheConfigs() {
const customCachePath = getInput(InputName.Caches);
try {
core.debug(`Reading cache configs from '${customCachePath}'`);
const customCache = await import(customCachePath);
Object.assign(caches, customCache.default);
} catch (error) {
if (
customCachePath !== DefaultInputs[InputName.Caches] ||
!error.message.includes('Cannot find module')
) {
core.error(error.message);
core.setFailed(
`Failed to load custom cache configs: '${customCachePath}'`,
);
return process.exit(1);
}
}
return true;
}
/**
* Generate SHA256 hash for a list of files matched by glob patterns.
*
* @param {string[]} patterns - The glob pattern.
* @param {string} extra - The extra string to append to the file hashes to
* compute the final hash.
*/
export async function hashFiles(
patterns: string[] | string,
extra: string = '',
) {
const globber = await glob.create(maybeArrayToString(patterns));
let hash = '';
let counter = 0;
for await (const file of globber.globGenerator()) {
if (!fs.statSync(file).isDirectory()) {
hash += hasha.fromFileSync(file, HASH_OPTION);
counter += 1;
}
}
core.debug(`Computed hash for ${counter} files. Pattern: ${patterns}`);
return hasha(hash + extra, HASH_OPTION);
}
/**
* Generate GitHub Action inputs based on predefined cache config. Will be used
* to override env variables.
*
* @param {string} cacheName - Name of the predefined cache config.
*/
export async function getCacheInputs(
cacheName: string,
): Promise<Inputs | null> {
if (!(cacheName in caches)) {
return null;
}
const { keyPrefix, restoreKeys, path, hashFiles: patterns } = caches[
cacheName
];
const pathString = maybeArrayToString(path);
const prefix = keyPrefix || `${cacheName}-`;
// include `path` in the hash, too, so as to bust caches in case users change
// the path definition.
const hash = await hashFiles(patterns, pathString);
return {
[InputName.Key]: `${prefix}${hash}`,
[InputName.Path]: pathString,
// only use the prefix as the restore key if `restoreKeys` is not defined
[InputName.RestoreKeys]:
restoreKeys === undefined ? prefix : maybeArrayToString(restoreKeys),
};
}
export const actions = {
restore(inputs: Inputs) {
return applyInputs(inputs, restoreCache);
},
save(inputs: Inputs) {
return applyInputs(inputs, saveCache);
},
};
export type ActionChoice = keyof typeof actions;
export async function run(
action: string | undefined = undefined,
cacheName: string | undefined = undefined,
) {
if (!action || !(action in actions)) {
core.setFailed(`Choose a cache action from: [restore, save]`);
return process.exit(1);
}
if (!cacheName) {
core.setFailed(`Must provide a cache name.`);
return process.exit(1);
}
const runInParallel = getInput(InputName.Parallel);
if (await loadCustomCacheConfigs()) {
if (runInParallel) {
core.info(`${action.toUpperCase()} cache for ${cacheName}`);
} else {
core.startGroup(`${action.toUpperCase()} cache for ${cacheName}`);
}
const inputs = await getCacheInputs(cacheName);
if (inputs) {
core.info(JSON.stringify(inputs, null, 2));
await actions[action as ActionChoice](inputs);
} else {
core.setFailed(`Cache '${cacheName}' not defined, failed to ${action}.`);
return process.exit(1);
}
if (!runInParallel) {
core.endGroup();
}
}
}


@ -0,0 +1,95 @@
/**
* Monkey patch to safely import and use @actions/cache modules
*/
import * as utils from '@actions/cache/src/utils/actionUtils';
import * as core from '@actions/core';
import * as fs from 'fs';
import * as os from 'os';
import { InputName } from '../constants';
import { getInput } from '../utils/inputs';
interface KeyValueStore {
[key: string]: any;
}
const { logWarning, isValidEvent } = utils;
const { getState, saveState } = core;
function getStateStoreFile() {
const cacheName = getInput(InputName.Key);
return `${os.tmpdir()}/cached-${cacheName}-states.json`;
}
/**
* Load states from the persistent store.
*
* The default `core.saveState` only writes states as command output, and
* `core.getState` can only read the state in a later step via environment
* variables.
*
* So we use a temp file to save and load states, so as to allow persistent
* states within the same step.
*
* Since the state output is not unique to each cache, each cache should have
* its own file for persistent states.
*/
function loadStates() {
const stateStore = getStateStoreFile();
const states: KeyValueStore = {};
try {
Object.assign(
states,
JSON.parse(fs.readFileSync(stateStore, { encoding: 'utf-8' })),
);
core.debug(`Loaded states from: ${stateStore}`);
} catch (error) {
// pass
if (error.code !== 'ENOENT') {
utils.logWarning(`Could not load states: ${stateStore}`);
utils.logWarning(error.message);
}
}
return states;
}
/**
* Save states to the persistent storage.
*/
function persistState(name: string, value: any) {
const states = loadStates();
const stateStore = getStateStoreFile();
const valueString = typeof value === 'string' ? value : JSON.stringify(value);
// make sure value is always string
states[name] = valueString;
// persist state in the temp file
fs.writeFileSync(stateStore, JSON.stringify(states, null, 2), {
encoding: 'utf-8',
});
core.debug(`Persist state "${name}=${valueString}" to ${stateStore}`);
// still pass the original value to the original function, though
return saveState(name, value);
}
/**
* Get states from persistent store, fallback to "official" states.
*/
function obtainState(name: string) {
const states = loadStates();
return states[name] || getState(name);
}
export function beginImport() {
Object.defineProperty(utils, 'isValidEvent', { value: () => false });
Object.defineProperty(utils, 'logWarning', { value: () => {} });
}
export function doneImport() {
Object.defineProperty(utils, 'isValidEvent', { value: isValidEvent });
Object.defineProperty(utils, 'logWarning', { value: logWarning });
Object.defineProperty(core, 'saveState', { value: persistState });
Object.defineProperty(core, 'getState', { value: obtainState });
}


@ -0,0 +1,43 @@
// Possible input names
export enum InputName {
// @actions/cache specific inputs
Key = 'key',
Path = 'path',
RestoreKeys = 'restore-keys',
// setup-webapp specific inputs
Run = 'run',
Caches = 'caches',
Bashlib = 'bashlib',
Parallel = 'parallel',
}
// Possible GitHub event names
export enum GitHubEvent {
Push = 'push',
PullRequest = 'pull_request',
}
// Directly available environment variables
export enum EnvVariable {
GitHubEventName = 'GITHUB_EVENT_NAME',
}
export const EnvVariableNames = new Set(Object.values(EnvVariable) as string[]);
export interface Inputs {
[EnvVariable.GitHubEventName]?: string;
[InputName.Key]?: string;
[InputName.RestoreKeys]?: string;
[InputName.Path]?: string;
[InputName.Caches]?: string;
[InputName.Bashlib]?: string;
[InputName.Run]?: string;
[InputName.Parallel]?: string;
}
export const DefaultInputs = {
[InputName.Caches]: '.github/workflows/caches.js',
[InputName.Bashlib]: '.github/workflows/bashlib.sh',
[InputName.Run]: 'default-setup-command',
} as Inputs;


@ -0,0 +1,3 @@
import { run } from './setup';
run();


@ -0,0 +1,61 @@
#!/bin/bash
# -----------------------------------------------
# Predefined command shortcuts
# -----------------------------------------------
# Exit if any command fails
set -e
bashSource=${BASH_SOURCE[${#BASH_SOURCE[@]} - 1]:-${(%):-%x}}
cacheScript="$(dirname $(dirname $(dirname $bashSource)))/dist/scripts/cache"
print-cachescript-path() {
echo $cacheScript
}
cache-restore() {
node $cacheScript restore $1
}
cache-save() {
node $cacheScript save $1
}
# install python packages
pip-install() {
cache-restore pip
echo "::group::Install Python pacakges"
pip install -r requirements.txt # install dependencies
pip install -e . # install current directory as editable python package
echo "::endgroup"
cache-save pip
}
# install npm packages
npm-install() {
cache-restore npm
echo "::group::Install npm pacakges"
echo "npm: $(npm --version)"
echo "node: $(node --version)"
npm ci
echo "::endgroup::"
cache-save npm
}
# install npm packages via yarn
yarn-install() {
cache-restore yarn
echo "::group::Install npm pacakges via yarn"
echo "npm: $(npm --version)"
echo "node: $(node --version)"
echo "yarn: $(yarn --version)"
yarn
echo "::endgroup::"
cache-save yarn
}
# the default setup command; override it in your own bashlib, e.g. to install both pip and npm packages at the same time
default-setup-command() {
echo 'Please provide `run` commands or configure `default-setup-command`.'
exit 1
}


@ -0,0 +1,18 @@
/**
* Runner script to restore/save caches with predefined configs.
* Used in `scripts/bashlib.sh`.
*/
import { EnvVariable } from '../constants';
// To import `@actions/cache` modules safely, we must set GitHub event name to
// an invalid value, so the actual runner code doesn't execute.
const originalEvent = process.env[EnvVariable.GitHubEventName];
process.env[EnvVariable.GitHubEventName] = 'CACHE_HACK';
import { run } from '../cache';
// then we restore the event name before the job actually runs
process.env[EnvVariable.GitHubEventName] = originalEvent;
// @ts-ignore
run(...process.argv.slice(2));


@ -0,0 +1,66 @@
/**
* Load inputs and execute.
*/
import * as core from '@actions/core';
import { exec } from '@actions/exec';
import path from 'path';
import fs from 'fs';
import { DefaultInputs, InputName } from './constants';
import { getInput } from './utils/inputs';
const SHARED_BASHLIB = path.resolve(__dirname, '../src/scripts/bashlib.sh');
/**
* Run bash commands with predefined lib functions.
*
* @param {string} cmd - The bash commands to execute.
*/
export async function runCommand(
cmd: string,
extraBashlib: string,
): Promise<void> {
const bashlibCommands = [`source ${SHARED_BASHLIB}`];
if (extraBashlib) {
bashlibCommands.push(`source ${extraBashlib}`);
}
try {
await exec('bash', ['-c', [...bashlibCommands, cmd].join('\n ')]);
} catch (error) {
core.setFailed(error.message);
process.exit(1);
}
}
export async function run(): Promise<void> {
let bashlib = getInput(InputName.Bashlib);
const rawCommands = getInput(InputName.Run);
const runInParallel = getInput(InputName.Parallel);
if (!fs.existsSync(bashlib)) {
if (bashlib !== DefaultInputs[InputName.Bashlib]) {
core.error(`Custom bashlib "${bashlib}" does not exist.`);
}
// don't add bashlib to runCommand
bashlib = '';
}
if (runInParallel) {
// Attempt to split by two or more newlines first; if there is still only
// one command, attempt to split by single newlines. This is because users
// asked for parallelization, so we make our best effort to find multiple
// commands.
let commands = rawCommands.split(/\n{2,}/);
if (commands.length === 1) {
commands = rawCommands.split('\n');
}
core.debug(`>> Run ${commands.length} commands in parallel...`);
await Promise.all(
commands
.map(x => x.trim())
.filter(x => !!x)
.map(cmd => exports.runCommand(cmd, bashlib)),
);
} else if (rawCommands) {
await exports.runCommand(rawCommands, bashlib);
}
}


@ -0,0 +1,2 @@
declare module '@actions/cache/dist/restore';
declare module '@actions/cache/dist/save';


@ -0,0 +1,61 @@
/**
* Manage inputs and env variables.
*/
import * as core from '@actions/core';
import {
Inputs,
EnvVariableNames,
InputName,
DefaultInputs,
} from '../constants';
export function getInput(name: keyof Inputs): string {
const value = core.getInput(name);
if (name === InputName.Parallel) {
return value.toUpperCase() === 'TRUE' ? value : '';
}
return value || DefaultInputs[name] || '';
}
/**
* Update env variables associated with some inputs.
* See: https://github.com/actions/toolkit/blob/5b940ebda7e7b86545fe9741903c930bc1191eb0/packages/core/src/core.ts#L69-L77 .
*
* @param {Inputs} inputs - The new inputs to apply to the env variables.
*/
export function setInputs(inputs: Inputs): void {
for (const [name, value] of Object.entries(inputs)) {
const envName = EnvVariableNames.has(name)
? name
: `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
process.env[envName] = value;
}
}
/**
* Apply new inputs and execute a runner function, restore them when done.
*
* @param {Inputs} inputs - The new inputs to apply to the env variables before
* executing the runner.
* @param {runner} runner - The runner function that returns a promise.
* @returns {Promise<any>} - The result from the runner function.
*/
export async function applyInputs(
inputs: Inputs,
runner: () => Promise<void>,
): Promise<any> {
const originalInputs: Inputs = Object.fromEntries(
Object.keys(inputs).map(name => [
name,
EnvVariableNames.has(name) ? process.env[name] : core.getInput(name),
]),
);
exports.setInputs(inputs);
const result = await runner();
exports.setInputs(originalInputs);
return result;
}
export function maybeArrayToString(input: string[] | string) {
return Array.isArray(input) ? input.join('\n') : input;
}


@ -0,0 +1,19 @@
{
"compilerOptions": {
"target": "es6",
"module": "commonjs",
"lib": ["esnext"],
"moduleResolution": "node",
"outDir": "./lib",
"rootDir": ".",
"strict": true,
"noImplicitAny": true,
"esModuleInterop": true,
"preserveSymlinks": true
},
"include": [
"./src",
"./node_modules/@actions"
],
"exclude": ["**/*.test.ts", "__tests__"]
}


@ -0,0 +1,13 @@
FROM ruby:2.6.0
LABEL "com.github.actions.name"="Comment on PR"
LABEL "com.github.actions.description"="Leaves a comment on an open PR matching a push event."
LABEL "com.github.actions.repository"="https://github.com/unsplash/comment-on-pr"
LABEL "com.github.actions.maintainer"="Aaron Klaassen <aaron@unsplash.com>"
LABEL "com.github.actions.icon"="message-square"
LABEL "com.github.actions.color"="blue"
RUN gem install octokit
ADD entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

.github/actions/comment-on-pr/LICENSE (7 lines, vendored)

@ -0,0 +1,7 @@
Copyright 2019 Unsplash Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

.github/actions/comment-on-pr/README.md (27 lines, vendored)

@ -0,0 +1,27 @@
# Comment on PR via GitHub Action
A GitHub action to comment on the relevant open PR when a commit is pushed.
## Usage
- Requires the `GITHUB_TOKEN` secret.
- Requires the comment's message in the `msg` parameter.
- Supports `push` and `pull_request` event types.
### Sample workflow
```
name: comment-on-pr example
on: pull_request
jobs:
example:
name: sample comment
runs-on: ubuntu-latest
steps:
- name: comment PR
uses: unsplash/comment-on-pr@master
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
msg: "Check out this message!"
```


@ -0,0 +1,15 @@
name: Comment on PR
author: Aaron Klaassen <aaron@unsplash.com>
description: Leaves a comment on an open PR matching a push event.
branding:
icon: 'message-square'
color: 'blue'
inputs:
msg:
description: Comment's message
required: true
runs:
using: 'docker'
image: 'Dockerfile'
args:
- ${{ inputs.msg }}

.github/actions/comment-on-pr/entrypoint.sh (47 lines, vendored, executable)

@ -0,0 +1,47 @@
#!/usr/bin/env ruby
require "json"
require "octokit"
json = File.read(ENV.fetch("GITHUB_EVENT_PATH"))
event = JSON.parse(json)
github = Octokit::Client.new(access_token: ENV["GITHUB_TOKEN"])
if !ENV["GITHUB_TOKEN"]
puts "Missing GITHUB_TOKEN"
exit(1)
end
if ARGV.empty?
puts "Missing message argument."
exit(1)
end
repo = event["repository"]["full_name"]
if ENV.fetch("GITHUB_EVENT_NAME") == "pull_request"
pr_number = event["number"]
else
pulls = github.pull_requests(repo, state: "open")
push_head = event["after"]
pr = pulls.find { |pr| pr["head"]["sha"] == push_head }
if !pr
puts "Couldn't find an open pull request for branch with head at #{push_head}."
exit(1)
end
pr_number = pr["number"]
end
message = ARGV.join(' ')
coms = github.issue_comments(repo, pr_number)
duplicate = coms.find { |c| c["user"]["login"] == "github-actions[bot]" && c["body"] == message }
if duplicate
puts "The PR already contains a database change notification"
exit(0)
end
github.add_comment(repo, pr_number, message)


@ -0,0 +1,55 @@
codecov:
notify:
require_ci_to_pass: yes
coverage:
notify:
slack:
default:
threshold: 1%
message: "Coverage {{changed}} for {{owner}}/{{repo}}" # customize the message
attachments: "sunburst, diff"
only_pulls: false
status:
src:
target: auto
threshold: 7%
base: auto
if_ci_failed: success
paths:
- src/
- '!src/tests/'
flags:
- src
test:
target: 60%
threshold: 10%
if_ci_failed: error
base: auto
paths:
- src/tests/
flags:
- test
precision: 2
round: down
range: "70...100"
flags:
src:
paths:
- src
- '!src/tests/'
test:
paths:
- src/tests/
parsers:
gcov:
branch_detection:
conditional: yes
loop: yes
method: no
macro: no
comment:
layout: "reach,diff,flags,tree"
behavior: default
require_changes: no


@ -0,0 +1,73 @@
plugins:
- '@typescript-eslint'
- eslint-comments
- promise
- unicorn
extends:
- airbnb-typescript
- plugin:@typescript-eslint/recommended
- plugin:eslint-comments/recommended
- plugin:promise/recommended
- plugin:unicorn/recommended
- prettier
- prettier/@typescript-eslint
settings:
import/parsers:
'@typescript-eslint/parser':
- .ts
- .tsx
- .js
import/resolver:
typescript: {}
rules:
unicorn/filename-case: off
react/static-property-placement: 0
no-prototype-builtins: 0
import/prefer-default-export: 0
'@typescript-eslint/no-explicit-any': 0
import/no-default-export: error
no-use-before-define:
- error
-
functions: false
classes: true
variables: true
'@typescript-eslint/explicit-function-return-type':
- error
-
allowExpressions: true
allowTypedFunctionExpressions: true
'@typescript-eslint/no-use-before-define':
- error
-
functions: false
classes: true
variables: true
typedefs: true
'@typescript-eslint/indent':
- 2
- 2
unicorn/prevent-abbreviations: 0
import/no-extraneous-dependencies: [error, {devDependencies: ['**/*.ts']}]
parser: "@typescript-eslint/parser"
parserOptions:
project: ./tsconfig.json
ecmaVersion: 2019
sourceType: module
env:
node: true
browser: true
ignorePatterns:
- '*.js'
overrides:
- files: ['src/tests/**/*']
plugins:
- jest
extends:
- plugin:jest/recommended
rules:
global-require: 0
'@typescript-eslint/no-var-requires': 0
no-console: 0
'@typescript-eslint/no-unused-vars': 0
'@typescript-eslint/no-throw-literal': 0


@ -0,0 +1,3 @@
# Contributing
The repository is released under the MIT license and follows a standard GitHub development process, using the GitHub issue tracker for issues and merging pull requests into master.


@ -0,0 +1,17 @@
---
name: Bug report
about: Create a report to help us improve
---
**Describe the bug**
A clear and concise description of what the bug is.
**Workflow**
If applicable, provide a workflow file to help explain your problem.
**Expected behavior**
A clear and concise description of what you expected to happen.
**Additional context**
Add any other context about the problem here.


@ -0,0 +1,17 @@
---
name: Feature request
about: Suggest an idea for this project
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.


@ -0,0 +1,14 @@
### Type of Change
<!-- What type of change does your code introduce? -->
- [ ] New feature
- [ ] Bug fix
- [ ] Documentation
- [ ] Refactor
- [ ] Chore
### Resolves
- Fixes #[Add issue number here.]
### Describe Changes
<!-- Describe your changes in detail, if applicable. -->
_Describe what this Pull Request does_


@ -0,0 +1 @@
.codecov.yml,.eslintignore,.eslintrc.json,.eslintrc.yml,.github/workflows/integration.yml,.github/workflows/pr.yml,.github/workflows/push.yml,.github/workflows/readme.md,.gitignore,.prettierignore,.prettierrc.json,.prettierrc.yml,.releaserc.yml,Makefile,README.md,__tests__/main.test.ts,action.yml,dist/index.js,jest.config.js,package.json,src/ChangedFiles.ts,src/File.ts,src/FilesHelper.ts,src/GithubHelper.ts,src/InputHelper.ts,src/UtilsHelper.ts,src/main.ts,src/tests/FilesHelper.test.ts,src/tests/GithubHelper.test.ts,src/tests/InputHelper.test.ts,src/tests/UtilsHelper.test.ts,src/tests/main.test.ts,src/tests/mocks/core/index.test.ts,src/tests/mocks/core/index.ts,src/tests/mocks/env/events/issue_comment_created.json,src/tests/mocks/env/events/issue_comment_edited.json,src/tests/mocks/env/events/pull_request_opened.json,src/tests/mocks/env/events/pull_request_reopened.json,src/tests/mocks/env/events/pull_request_synchronize.json,src/tests/mocks/env/events/push.json,src/tests/mocks/env/events/push_merge.json,src/tests/mocks/env/events/schedule.json,src/tests/mocks/env/index.test.ts,src/tests/mocks/env/index.ts,src/tests/mocks/fs/index.test.ts,src/tests/mocks/fs/index.ts,src/tests/mocks/github/index.test.ts,src/tests/mocks/github/index.ts,src/tests/mocks/octokit/endpoint/merge.test.ts,src/tests/mocks/octokit/endpoint/merge.ts,src/tests/mocks/octokit/index.test.ts,src/tests/mocks/octokit/index.ts,src/tests/mocks/octokit/paginate.test.ts,src/tests/mocks/octokit/paginate.ts,src/tests/mocks/octokit/payloads.ts,src/tests/mocks/octokit/pulls/listFiles.test.ts,src/tests/mocks/octokit/pulls/listFiles.ts,src/tests/mocks/octokit/repos/compareCommits.test.ts,src/tests/mocks/octokit/repos/compareCommits.ts,src/tests/payloads.ts,src/typings/ActionError/index.d.ts,src/typings/ChangedFiles/index.d.ts,src/typings/CoreMock/index.d.ts,src/typings/FsMock/index.d.ts,src/typings/GitHubFile/index.d.ts,src/typings/GitHubMock/index.d.ts,src/typings/Inferred/index.d.ts,src/typings/Inputs/index.d.ts,src/typings/OctokitMock/index.d.ts,src/typings/TestInput/index.d.ts,tsconfig.build.json,tsconfig.json,yarn.lock


@ -0,0 +1,75 @@
[
".codecov.yml",
".eslintignore",
".eslintrc.json",
".eslintrc.yml",
".github/workflows/integration.yml",
".github/workflows/pr.yml",
".github/workflows/push.yml",
".github/workflows/readme.md",
".gitignore",
".prettierignore",
".prettierrc.json",
".prettierrc.yml",
".releaserc.yml",
"Makefile",
"README.md",
"__tests__/main.test.ts",
"action.yml",
"dist/index.js",
"jest.config.js",
"package.json",
"src/ChangedFiles.ts",
"src/File.ts",
"src/FilesHelper.ts",
"src/GithubHelper.ts",
"src/InputHelper.ts",
"src/UtilsHelper.ts",
"src/main.ts",
"src/tests/FilesHelper.test.ts",
"src/tests/GithubHelper.test.ts",
"src/tests/InputHelper.test.ts",
"src/tests/UtilsHelper.test.ts",
"src/tests/main.test.ts",
"src/tests/mocks/core/index.test.ts",
"src/tests/mocks/core/index.ts",
"src/tests/mocks/env/events/issue_comment_created.json",
"src/tests/mocks/env/events/issue_comment_edited.json",
"src/tests/mocks/env/events/pull_request_opened.json",
"src/tests/mocks/env/events/pull_request_reopened.json",
"src/tests/mocks/env/events/pull_request_synchronize.json",
"src/tests/mocks/env/events/push.json",
"src/tests/mocks/env/events/push_merge.json",
"src/tests/mocks/env/events/schedule.json",
"src/tests/mocks/env/index.test.ts",
"src/tests/mocks/env/index.ts",
"src/tests/mocks/fs/index.test.ts",
"src/tests/mocks/fs/index.ts",
"src/tests/mocks/github/index.test.ts",
"src/tests/mocks/github/index.ts",
"src/tests/mocks/octokit/endpoint/merge.test.ts",
"src/tests/mocks/octokit/endpoint/merge.ts",
"src/tests/mocks/octokit/index.test.ts",
"src/tests/mocks/octokit/index.ts",
"src/tests/mocks/octokit/paginate.test.ts",
"src/tests/mocks/octokit/paginate.ts",
"src/tests/mocks/octokit/payloads.ts",
"src/tests/mocks/octokit/pulls/listFiles.test.ts",
"src/tests/mocks/octokit/pulls/listFiles.ts",
"src/tests/mocks/octokit/repos/compareCommits.test.ts",
"src/tests/mocks/octokit/repos/compareCommits.ts",
"src/tests/payloads.ts",
"src/typings/ActionError/index.d.ts",
"src/typings/ChangedFiles/index.d.ts",
"src/typings/CoreMock/index.d.ts",
"src/typings/FsMock/index.d.ts",
"src/typings/GitHubFile/index.d.ts",
"src/typings/GitHubMock/index.d.ts",
"src/typings/Inferred/index.d.ts",
"src/typings/Inputs/index.d.ts",
"src/typings/OctokitMock/index.d.ts",
"src/typings/TestInput/index.d.ts",
"tsconfig.build.json",
"tsconfig.json",
"yarn.lock"
]


@ -0,0 +1 @@
.codecov.yml .eslintignore .eslintrc.json .eslintrc.yml .github/workflows/integration.yml .github/workflows/pr.yml .github/workflows/push.yml .github/workflows/readme.md .gitignore .prettierignore .prettierrc.json .prettierrc.yml .releaserc.yml Makefile README.md __tests__/main.test.ts action.yml dist/index.js jest.config.js package.json src/ChangedFiles.ts src/File.ts src/FilesHelper.ts src/GithubHelper.ts src/InputHelper.ts src/UtilsHelper.ts src/main.ts src/tests/FilesHelper.test.ts src/tests/GithubHelper.test.ts src/tests/InputHelper.test.ts src/tests/UtilsHelper.test.ts src/tests/main.test.ts src/tests/mocks/core/index.test.ts src/tests/mocks/core/index.ts src/tests/mocks/env/events/issue_comment_created.json src/tests/mocks/env/events/issue_comment_edited.json src/tests/mocks/env/events/pull_request_opened.json src/tests/mocks/env/events/pull_request_reopened.json src/tests/mocks/env/events/pull_request_synchronize.json src/tests/mocks/env/events/push.json src/tests/mocks/env/events/push_merge.json src/tests/mocks/env/events/schedule.json src/tests/mocks/env/index.test.ts src/tests/mocks/env/index.ts src/tests/mocks/fs/index.test.ts src/tests/mocks/fs/index.ts src/tests/mocks/github/index.test.ts src/tests/mocks/github/index.ts src/tests/mocks/octokit/endpoint/merge.test.ts src/tests/mocks/octokit/endpoint/merge.ts src/tests/mocks/octokit/index.test.ts src/tests/mocks/octokit/index.ts src/tests/mocks/octokit/paginate.test.ts src/tests/mocks/octokit/paginate.ts src/tests/mocks/octokit/payloads.ts src/tests/mocks/octokit/pulls/listFiles.test.ts src/tests/mocks/octokit/pulls/listFiles.ts src/tests/mocks/octokit/repos/compareCommits.test.ts src/tests/mocks/octokit/repos/compareCommits.ts src/tests/payloads.ts src/typings/ActionError/index.d.ts src/typings/ChangedFiles/index.d.ts src/typings/CoreMock/index.d.ts src/typings/FsMock/index.d.ts src/typings/GitHubFile/index.d.ts src/typings/GitHubMock/index.d.ts src/typings/Inferred/index.d.ts src/typings/Inputs/index.d.ts src/typings/OctokitMock/index.d.ts src/typings/TestInput/index.d.ts tsconfig.build.json tsconfig.json yarn.lock


@ -0,0 +1 @@
.codecov.yml,.eslintrc.yml,.prettierrc.yml,.releaserc.yml,src/FilesHelper.ts,src/GithubHelper.ts,src/InputHelper.ts,src/UtilsHelper.ts,src/tests/FilesHelper.test.ts,src/tests/GithubHelper.test.ts,src/tests/InputHelper.test.ts,src/tests/UtilsHelper.test.ts,src/tests/main.test.ts,src/tests/mocks/core/index.test.ts,src/tests/mocks/core/index.ts,src/tests/mocks/env/events/issue_comment_created.json,src/tests/mocks/env/events/issue_comment_edited.json,src/tests/mocks/env/events/pull_request_opened.json,src/tests/mocks/env/events/pull_request_reopened.json,src/tests/mocks/env/events/pull_request_synchronize.json,src/tests/mocks/env/events/push.json,src/tests/mocks/env/events/push_merge.json,src/tests/mocks/env/events/schedule.json,src/tests/mocks/env/index.test.ts,src/tests/mocks/env/index.ts,src/tests/mocks/fs/index.test.ts,src/tests/mocks/fs/index.ts,src/tests/mocks/github/index.test.ts,src/tests/mocks/github/index.ts,src/tests/mocks/octokit/endpoint/merge.test.ts,src/tests/mocks/octokit/endpoint/merge.ts,src/tests/mocks/octokit/index.test.ts,src/tests/mocks/octokit/index.ts,src/tests/mocks/octokit/paginate.test.ts,src/tests/mocks/octokit/paginate.ts,src/tests/mocks/octokit/payloads.ts,src/tests/mocks/octokit/pulls/listFiles.test.ts,src/tests/mocks/octokit/pulls/listFiles.ts,src/tests/mocks/octokit/repos/compareCommits.test.ts,src/tests/mocks/octokit/repos/compareCommits.ts,src/tests/payloads.ts,src/typings/ActionError/index.d.ts,src/typings/ChangedFiles/index.d.ts,src/typings/CoreMock/index.d.ts,src/typings/FsMock/index.d.ts,src/typings/GitHubFile/index.d.ts,src/typings/GitHubMock/index.d.ts,src/typings/Inferred/index.d.ts,src/typings/Inputs/index.d.ts,src/typings/OctokitMock/index.d.ts,src/typings/TestInput/index.d.ts,tsconfig.build.json


@ -0,0 +1,54 @@
[
".codecov.yml",
".eslintrc.yml",
".prettierrc.yml",
".releaserc.yml",
"src/FilesHelper.ts",
"src/GithubHelper.ts",
"src/InputHelper.ts",
"src/UtilsHelper.ts",
"src/tests/FilesHelper.test.ts",
"src/tests/GithubHelper.test.ts",
"src/tests/InputHelper.test.ts",
"src/tests/UtilsHelper.test.ts",
"src/tests/main.test.ts",
"src/tests/mocks/core/index.test.ts",
"src/tests/mocks/core/index.ts",
"src/tests/mocks/env/events/issue_comment_created.json",
"src/tests/mocks/env/events/issue_comment_edited.json",
"src/tests/mocks/env/events/pull_request_opened.json",
"src/tests/mocks/env/events/pull_request_reopened.json",
"src/tests/mocks/env/events/pull_request_synchronize.json",
"src/tests/mocks/env/events/push.json",
"src/tests/mocks/env/events/push_merge.json",
"src/tests/mocks/env/events/schedule.json",
"src/tests/mocks/env/index.test.ts",
"src/tests/mocks/env/index.ts",
"src/tests/mocks/fs/index.test.ts",
"src/tests/mocks/fs/index.ts",
"src/tests/mocks/github/index.test.ts",
"src/tests/mocks/github/index.ts",
"src/tests/mocks/octokit/endpoint/merge.test.ts",
"src/tests/mocks/octokit/endpoint/merge.ts",
"src/tests/mocks/octokit/index.test.ts",
"src/tests/mocks/octokit/index.ts",
"src/tests/mocks/octokit/paginate.test.ts",
"src/tests/mocks/octokit/paginate.ts",
"src/tests/mocks/octokit/payloads.ts",
"src/tests/mocks/octokit/pulls/listFiles.test.ts",
"src/tests/mocks/octokit/pulls/listFiles.ts",
"src/tests/mocks/octokit/repos/compareCommits.test.ts",
"src/tests/mocks/octokit/repos/compareCommits.ts",
"src/tests/payloads.ts",
"src/typings/ActionError/index.d.ts",
"src/typings/ChangedFiles/index.d.ts",
"src/typings/CoreMock/index.d.ts",
"src/typings/FsMock/index.d.ts",
"src/typings/GitHubFile/index.d.ts",
"src/typings/GitHubMock/index.d.ts",
"src/typings/Inferred/index.d.ts",
"src/typings/Inputs/index.d.ts",
"src/typings/OctokitMock/index.d.ts",
"src/typings/TestInput/index.d.ts",
"tsconfig.build.json"
]


@ -0,0 +1 @@
.codecov.yml .eslintrc.yml .prettierrc.yml .releaserc.yml src/FilesHelper.ts src/GithubHelper.ts src/InputHelper.ts src/UtilsHelper.ts src/tests/FilesHelper.test.ts src/tests/GithubHelper.test.ts src/tests/InputHelper.test.ts src/tests/UtilsHelper.test.ts src/tests/main.test.ts src/tests/mocks/core/index.test.ts src/tests/mocks/core/index.ts src/tests/mocks/env/events/issue_comment_created.json src/tests/mocks/env/events/issue_comment_edited.json src/tests/mocks/env/events/pull_request_opened.json src/tests/mocks/env/events/pull_request_reopened.json src/tests/mocks/env/events/pull_request_synchronize.json src/tests/mocks/env/events/push.json src/tests/mocks/env/events/push_merge.json src/tests/mocks/env/events/schedule.json src/tests/mocks/env/index.test.ts src/tests/mocks/env/index.ts src/tests/mocks/fs/index.test.ts src/tests/mocks/fs/index.ts src/tests/mocks/github/index.test.ts src/tests/mocks/github/index.ts src/tests/mocks/octokit/endpoint/merge.test.ts src/tests/mocks/octokit/endpoint/merge.ts src/tests/mocks/octokit/index.test.ts src/tests/mocks/octokit/index.ts src/tests/mocks/octokit/paginate.test.ts src/tests/mocks/octokit/paginate.ts src/tests/mocks/octokit/payloads.ts src/tests/mocks/octokit/pulls/listFiles.test.ts src/tests/mocks/octokit/pulls/listFiles.ts src/tests/mocks/octokit/repos/compareCommits.test.ts src/tests/mocks/octokit/repos/compareCommits.ts src/tests/payloads.ts src/typings/ActionError/index.d.ts src/typings/ChangedFiles/index.d.ts src/typings/CoreMock/index.d.ts src/typings/FsMock/index.d.ts src/typings/GitHubFile/index.d.ts src/typings/GitHubMock/index.d.ts src/typings/Inferred/index.d.ts src/typings/Inputs/index.d.ts src/typings/OctokitMock/index.d.ts src/typings/TestInput/index.d.ts tsconfig.build.json

View File

@ -0,0 +1 @@
.github/workflows/integration.yml,.github/workflows/pr.yml,.github/workflows/push.yml,.github/workflows/readme.md,.gitignore,.prettierignore,README.md,action.yml,jest.config.js,package.json,src/main.ts,tsconfig.json,yarn.lock
.github/workflows/integration.yml .github/workflows/pr.yml .github/workflows/push.yml .github/workflows/readme.md .gitignore .prettierignore README.md action.yml jest.config.js package.json src/main.ts tsconfig.json yarn.lock

View File

@ -0,0 +1,15 @@
[
".github/workflows/integration.yml",
".github/workflows/pr.yml",
".github/workflows/push.yml",
".github/workflows/readme.md",
".gitignore",
".prettierignore",
"README.md",
"action.yml",
"jest.config.js",
"package.json",
"src/main.ts",
"tsconfig.json",
"yarn.lock"
]

View File

@ -0,0 +1 @@
.github/workflows/integration.yml .github/workflows/pr.yml .github/workflows/push.yml .github/workflows/readme.md .gitignore .prettierignore README.md action.yml jest.config.js package.json src/main.ts tsconfig.json yarn.lock

View File

@ -0,0 +1 @@
.eslintignore,.eslintrc.json,.prettierrc.json,Makefile,__tests__/main.test.ts,dist/index.js,src/ChangedFiles.ts,src/File.ts
.eslintignore .eslintrc.json .prettierrc.json Makefile __tests__/main.test.ts dist/index.js src/ChangedFiles.ts src/File.ts

View File

@ -0,0 +1,10 @@
[
".eslintignore",
".eslintrc.json",
".prettierrc.json",
"Makefile",
"__tests__/main.test.ts",
"dist/index.js",
"src/ChangedFiles.ts",
"src/File.ts"
]

View File

@ -0,0 +1 @@
.eslintignore .eslintrc.json .prettierrc.json Makefile __tests__/main.test.ts dist/index.js src/ChangedFiles.ts src/File.ts

View File

@ -0,0 +1,157 @@
json_output='["functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda.json", "functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda_post.json", "functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda_post.json", "functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda_post.json"]'
csv_output="functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda.json,functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda_post.json,functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda_post.json,functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda_post.json"
txt_hard_output='functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda.json_<br />&nbsp;&nbsp;_functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda_post.json_<br />&nbsp;&nbsp;_functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda_post.json_<br />&nbsp;&nbsp;_functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda_post.json'
txt_output='functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda.json functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda_post.json functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda_post.json functions/twitch-sadako/webhookSubscribeLambda/test/webhookSubscribeLambda_post.json'
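# The fixtures above mirror the output formats the action supports: json, csv,
# a custom "<br />" delimiter, and plain space-delimited text.
# testOutput measures the length of an output variable (jq array length for json,
# awk field count for delimited text), testFile does the same for the matching
# file on disk, and testResults compares both against the expected counts for
# the simple and real event fixtures.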
testOutput () {
# read from var
if [ "${2}" == "json" ]; then
local output_length=$(echo "${1}" | jq '. | length')
elif [ "${2}" == "," ]; then
local output_length=$(awk -F"${2}" '{print NF-1}' <<< $(echo "${1}"))
else
local output_length=$(awk -F"${2}" '{print NF-1}' <<< $(echo "${1}"))
fi
echo "$output_length"
}
testFile () {
# read from file
if [ "${2}" == "json" ]; then
local file_length=$(jq -r '. | length' ${file}.json)
elif [ "${2}" == "," ]; then
local file_length=$(cat ${file}.csv | awk -F"${2}" '{print NF-1}')
else
local file_length=$(cat ${file}.txt | awk -F"${2}" '{print NF-1}')
fi
echo "$file_length"
}
cleanTest () {
rm -rf $1.json $1.csv $1.txt
}
prepareTest () {
# if the prefix is "simple_", set up the test variable and fixture file
if [ "$1" == "simple_" ]; then
# declare a var named simple_FILE
if [ "$dev" == "dev" ]; then
local file_prefix="events/"
else
local file_prefix=""
fi
declare -n file=${1}${2}
if [ "$3" == "json" ]; then
echo ${json_output} > "${file_prefix}${!file}.json"
elif [ "$3" == "," ]; then
echo ${csv_output} > "${file_prefix}${!file}.csv"
elif [ "$3" == "_<br />&nbsp;&nbsp;_" ]; then
echo ${txt_hard_output} > "${file_prefix}${!file}.txt"
else
echo ${txt_output} > "${file_prefix}${!file}.txt"
fi
if [ "$4" == "json" ]; then
file=$json_output
elif [ "$4" == "," ]; then
file=$csv_output
elif [ "$4" == "_<br />&nbsp;&nbsp;_" ]; then
file=$txt_hard_output
else
file=$txt_output
fi
else
declare -n file=${2}
if [ "$dev" == "dev" ]; then
if [ "$4" == "json" ]; then
file="$(cat events/${!file}.json)"
elif [ "$4" == "," ]; then
file="$(cat events/${!file}.csv)"
else
file="$(cat events/${!file}.txt)"
fi
fi
fi
echo "${file}"
}
testResults () {
if [ "$1" == 'simple_' ]; then
expected=3
if [ "$2" == 'json' ]; then
expected=$(($expected+1))
fi
# echo $result
if [ "$3" != "$expected" ]; then
echo -e "\t\033[1;91mTest failure $5/($1)$4:'$2' { EXPECTED:$expected RECEIVED:$3 } \033[0m"
exit 1;
fi
else
if [ "$4" == 'files' ]; then
expected=72
elif [ "$4" == 'files_added' ]; then
expected=51
elif [ "$4" == 'files_modified' ]; then
expected=12
elif [ "$4" == 'files_removed' ]; then
expected=7
fi
if [ "$2" == 'json' ]; then
expected=$(($expected+1))
fi
if [ "$3" != "$expected" ]; then
echo -e "\t\033[1;91mTest failure $5/($1)$4:'$2' { EXPECTED:$expected RECEIVED:$3 } \033[0m"
exit 1;
fi
fi
echo -e "\t\033[1;92mTest success $5/($1)$4:'$2' { $expected == $3 } \033[0m"
}
runTest () {
for prefix in "simple_" "real"; do \
file=${1}
if [ "$prefix" == 'simple_' ]; then
if [ "$dev" == "dev" ]; then
file=events/${prefix}${1}
else
file=${prefix}${1}
fi
elif [ "$prefix" != 'simple_' ] && [ "$dev" == "dev" ]; then
file=events/${1}
fi
input="$(prepareTest $prefix $1 "$2" "$3")"
local file_length=$(testFile $file "${2}")
local output_length=$(testOutput "${input}" "${3}")
testResults $prefix "${2}" "$file_length" "$1" "fileOutput"
testResults $prefix "${3}" "$output_length" "$1" "output"
if [ "$prefix" == 'simple_' ]; then
cleanTest $file
fi
done
}
test () {
if [ "$dev" == "dev" ]; then
echo -e "\t\033[1;91mDEV MODE\033[0m"
fi
if [ "$output" == "" ] || [ "$fileOutput" == "" ]; then
for fileOutput in "json" "," " "; do \
echo -e "\033[1;92mFILEOUTPUT:'$fileOutput'\033[0m"
for output in "json" "," " "; do \
echo -e "\033[1;92mOUTPUT:'$output'\033[0m"
for file in "files" "files_modified" "files_added" "files_removed"; do \
echo -e "\033[1;92mFILE:'$file'\033[0m"
runTest $file "$fileOutput" "$output"
done
done
done
else
for file in "files" "files_modified" "files_added" "files_removed"; do \
echo -e "\033[1;92mFILE:'$file' with FILEOUTPUT:'$fileOutput' OUTPUT:'$output'\033[0m"
runTest $file "$fileOutput" "$output"
done
fi
}
dev=$1
test

View File

@ -0,0 +1,26 @@
# Set to true to add reviewers to pull requests
addReviewers: true
# Set to true to add assignees to pull requests
addAssignees: author
# A list of reviewers to be added to pull requests (GitHub user name)
reviewers:
- trilom
# A number of reviewers added to the pull request
# Set 0 to add all the reviewers (default: 0)
numberOfReviewers: 0
# A list of assignees, overrides reviewers if set
# assignees:
# - assigneeA
# A number of assignees to add to the pull request
# Set to 0 to add all of the assignees.
# Uses numberOfReviewers if unset.
# numberOfAssignees: 2
# A list of keywords; if a pull request title includes any of them, adding reviewers is skipped
# skipKeywords:
# - wip

View File

@ -0,0 +1,42 @@
- name: pretty
description: Code that has been linted with eslint and prettier
color: 76edd1
- name: builds
description: Code that builds with yarn and tsc
color: 39bc44
- name: tested-unit
description: Code that has passed unit tests with jest
color: 9520bc
- name: tested-integration
description: Code that has passed integration tests with jest
color: fc5aee
- name: "doesnt read directions"
description: "Doesn't know how to read directions, please PR to develop"
color: d876e3
- name: automated pr
description: This was created by create-pull-request action
color: b9ff9b
- name: released
description: This has been released to NPM, Github Packages, and Actions Marketplace
color: ededed
- name: bug
description: Something isn't working
color: d73a4a
- name: duplicate
description: This issue or pull request already exists
color: cfd3d7
- name: enhancement
description: New feature or request
color: a2eeef
- name: "automated merge"
description: This was merged automatically
color: c2e0c6
- name: "hold merge"
description: This merge will be blocked from automerging until this label is removed
color: b60205
- name: lintdogged
description: Code that has been looked at by reviewdog
color: 5F422D
- name: failure
description: Something bad happened...
color: d93f0b

View File

@ -0,0 +1,97 @@
# this will tag PRs that are ready for release and automerge them
name: Automerge Pull Requests
on:
# issue_comment:
# types: [created]
pull_request:
branches: [master, next, alpha, beta]
types: [labeled, closed]
jobs:
automerge:
name: automerge pr
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ secrets.TRILOM_BOT_TOKEN }}
pr_number: ${{ format('{0}{1}', github.event.pull_request.number, github.event.issue.number) }}
# if the event is a non-fork PR, or an issue comment on a PR from trilom containing '/release'
if: >-
(
github.event_name == 'pull_request'
&& github.event.pull_request.head.repo.full_name == github.repository
&& contains(github.event.pull_request.labels.*.name, 'pretty')
&& contains(github.event.pull_request.labels.*.name, 'builds')
&& contains(github.event.pull_request.labels.*.name, 'tested-unit')
&& contains(github.event.pull_request.labels.*.name, 'tested-integration')
&& contains(github.event.pull_request.labels.*.name, 'lintdogged')
&& ! contains(github.event.pull_request.labels.*.name, 'automated merge')
&& ! contains(github.event.pull_request.labels.*.name, 'hold merge')
) || (
github.event_name == 'issue_comment'
&& github.event.issue.pull_request != ''
&& contains(github.event.comment.body, '/release')
&& github.actor == 'trilom'
&& contains(github.event.issue.labels.*.name, 'pretty')
&& contains(github.event.issue.labels.*.name, 'builds')
&& contains(github.event.issue.labels.*.name, 'tested-unit')
&& contains(github.event.issue.labels.*.name, 'tested-integration')
&& contains(github.event.issue.labels.*.name, 'lintdogged')
&& ! contains(github.event.issue.labels.*.name, 'automated merge')
&& ! contains(github.event.issue.labels.*.name, 'hold merge'))
steps:
- name: if pretty, builds, tested merge automerge pr
# if pretty, builds, and tested labels then merge
uses: pascalgn/automerge-action@v0.7.5
env:
GITHUB_TOKEN: ${{ env.GITHUB_TOKEN }}
MERGE_METHOD: merge
# this breaks the /release on issue_comment portion unless I get the head.ref from github-script
MERGE_COMMIT_MESSAGE: 'Auto merge from ${{ github.event.pull_request.head.ref }} PR#{pullRequest.number}: {pullRequest.title}'
UPDATE_METHOD: merge
MERGE_LABELS: 'pretty,builds,tested-unit,tested-integration,lintdogged'
UPDATE_LABELS: ''
# if failure, get payload of PR and notify
- name: if failure, get pr payload
uses: actions/github-script@0.8.0
id: pr_json
if: failure()
with:
github-token: ${{env.GITHUB_TOKEN}}
script: |
const result = await github.pulls.get({
owner: '${{ github.repository }}'.split('/')[0],
repo: '${{ github.repository }}'.split('/')[1],
pull_number: ${{ env.pr_number }}
})
return result.data;
- name: if failure, set pr payload outputs
if: failure()
id: pr
run: |
echo '${{ steps.pr_json.outputs.result }}' > pr.json
echo "::set-output name=user::$( jq -r '.user.login' pr.json )"
echo "::set-output name=head::$( jq -r '.head.repo.full_name' pr.json )"
echo "::set-output name=head_url::$( jq -r '.head.repo.html_url' pr.json )"
echo "::set-output name=base::$( jq -r '.base.repo.full_name' pr.json )"
echo "::set-output name=base_url::$( jq -r '.base.repo.html_url' pr.json )"
- name: if failure, notify
uses: peter-evans/create-or-update-comment@v1
if: failure()
with:
token: ${{ env.GITHUB_TOKEN }}
issue-number: ${{ env.pr_number }}
body: |
@${{ steps.pr.outputs.user }}, @trilom - it appears that there was an issue with the merge.
Head Repo/Branch: **[${{ steps.pr.outputs.head }}]**(${{ steps.pr.outputs.head_url }}) merge into **[${{ steps.pr.outputs.base }}]**(${{ steps.pr.outputs.base_url }})
## Event JSON
```json
${{ toJSON(steps.pr_json.outputs.result)}}
```
- uses: actions/github-script@0.6.0
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
github.issues.addLabels({owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number,
labels: ['automated merge']
})

View File

@ -0,0 +1,19 @@
name: Close Pull Request
on:
pull_request:
branches-ignore: [master]
types: [opened, reopened]
jobs:
# close any fork PRs not opened by trilom to anything but master
close_pr:
name: close non master PRs from fork
runs-on: ubuntu-latest
if: (github.actor != 'trilom' && github.actor != 'trilom-bot') && github.event.pull_request.head.repo.full_name != github.repository
steps:
- uses: superbrothers/close-pull-request@v2
with:
comment: Please merge your code into master, this will trigger the desired merge workflow.
- uses: actions/github@v1.0.0
if: success()
with:
args: label "doesnt read directions"

View File

@ -0,0 +1,106 @@
name: Integration Tests
on:
issue_comment:
types:
- created
schedule:
- cron: '0 0 * * *'
pull_request:
branches: [master]
push:
branches: [master]
jobs:
# always_job:
# name: Always run job
# runs-on: ubuntu-latest
# steps:
# - name: dump env
# env:
# GITHUB_CONTEXT: ${{ toJson(github) }}
# JOB_CONTEXT: ${{ toJson(job) }}
# STEPS_CONTEXT: ${{ toJson(steps) }}
# RUNNER_CONTEXT: ${{ toJson(runner) }}
# STRATEGY_CONTEXT: ${{ toJson(strategy) }}
# MATRIX_CONTEXT: ${{ toJson(matrix) }}
# run: |
# echo "GITHUB_EVENT_PATH\n$GITHUB_EVENT_PATH"
# echo "GITHUB_CONTEXT\n$GITHUB_CONTEXT"
# echo "JOB_CONTEXT\n$JOB_CONTEXT"
# echo "STEPS_CONTEXT\n$STEPS_CONTEXT"
# echo "RUNNER_CONTEXT\n$RUNNER_CONTEXT"
# echo "STRATEGY_CONTEXT\n$STRATEGY_CONTEXT"
# echo "MATRIX_CONTEXT\n$MATRIX_CONTEXT"
integration:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
event_type: ['push', 'pull_request']
output: ['json', ',', ' ', '_<br />&nbsp;&nbsp;_']
fileOutput: ['json', ',', ' ', '_<br />&nbsp;&nbsp;_']
if: >-
( startsWith(github.head_ref, '1.')
|| startsWith(github.head_ref, '2.'))
||
contains(github.event.head_commit.message, 'Release merge from')
||
github.event_name == 'schedule'
|| (
github.event_name == 'issue_comment'
&& github.event.issue.number != ''
&& contains(github.event.comment.body, '/integration')
&& github.actor == 'trilom')
steps:
# get pr number if exists
- id: pr
if: github.event_name == 'issue_comment'
run: |
pr=$(echo "${{github.event.comment.body}}" | sed 's|.*/integration||') &&
echo "::set-output name=pr::${pr}"
env:
comment: ${{ toJson(github) }}
# use pr number from integration command
- uses: actions/checkout@v2
if: github.event_name == 'issue_comment' && steps.pr.outputs.pr != ''
with:
ref: ${{format('refs/pull/{0}/head', steps.pr.outputs.pr )}}
# use the issue number if pr is blank
- uses: actions/checkout@v2
if: github.event_name == 'issue_comment' && steps.pr.outputs.pr == '' && github.event.issue.pull_request != ''
with:
ref: ${{format('refs/pull/{0}/head', github.event.issue.number )}}
- name: fail if no PR number and issue comment
if: github.event_name == 'issue_comment' && steps.pr.outputs.pr == '' && github.event.issue.pull_request == ''
run: |
echo "Please provide a PR number to use like /integration13 for PR# 13."
exit 1
- uses: actions/checkout@v2
if: github.event_name != 'issue_comment'
- run: yarn build-package
- uses: ./
id: file_changes_build_pr
if: matrix.event_type == 'pull_request'
with:
prNumber: 83
output: ${{ matrix.output }}
fileOutput: ${{ matrix.fileOutput }}
- uses: ./
id: file_changes_build_push
if: matrix.event_type == 'push'
with:
pushBefore: 6ac7697cd1c4f23a08d4d4edbe7dab06b34c58a2
pushAfter: 4ee1a1a2515f4ac1b90a56aaeb060b97f20c8968
output: ${{ matrix.output }}
fileOutput: ${{ matrix.fileOutput }}
- run: |
mv $HOME/files* .
chmod +x test.sh && ./test.sh
working-directory: .github/actions/integration
if: success()
env:
fileOutput: ${{ matrix.fileOutput }}
output: ${{ matrix.output }}
files: ${{ format('{0}{1}', steps.file_changes_build_pr.outputs.files, steps.file_changes_build_push.outputs.files ) }}
files_modified: ${{ format('{0}{1}', steps.file_changes_build_pr.outputs.files_modified, steps.file_changes_build_push.outputs.files_modified ) }}
files_added: ${{ format('{0}{1}', steps.file_changes_build_pr.outputs.files_added, steps.file_changes_build_push.outputs.files_added ) }}
files_removed: ${{ format('{0}{1}', steps.file_changes_build_pr.outputs.files_removed, steps.file_changes_build_push.outputs.files_removed ) }}

View File

@ -0,0 +1,13 @@
name: Sync labels
on:
push:
branches: [master]
paths: [.github/labels.yml]
jobs:
make-labels:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: micnncim/action-label-syncer@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -0,0 +1,192 @@
name: Contribution Workflow
env:
isFork: ${{ github.event.pull_request.head.repo.full_name != github.repository }}
on: [pull_request]
jobs:
add-reviews:
runs-on: ubuntu-latest
steps:
- uses: kentaro-m/auto-assign-action@v1.1.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
# make sure we can build
build:
name: yarn install && tsc
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: yarn build
- uses: actions/github-script@0.6.0
if: failure() && contains(env.isFork, 'false')
with:
github-token: ${{ secrets.TRILOM_BOT_TOKEN }}
script: |
if ('${{ contains(github.event.pull_request.labels.*.name, 'builds') }}' == 'true') {
github.issues.removeLabel({owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number,
name: 'builds'
})
}
- uses: actions/github-script@0.6.0
if: contains(env.isFork, 'false')
with:
github-token: ${{ secrets.TRILOM_BOT_TOKEN }}
script: |
github.issues.addLabels({owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number,
labels: ['builds']
})
# unit test with jest
test-unit:
name: jest unit tests
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v2
- run: yarn build
- run: yarn test-coverage
- run: bash <(curl -s https://codecov.io/bash)
if: contains(env.isFork, 'false')
- uses: actions/github-script@0.6.0
if: failure() && contains(env.isFork, 'false')
with:
github-token: ${{ secrets.TRILOM_BOT_TOKEN }}
script: |
if ('${{ contains(github.event.pull_request.labels.*.name, 'tested-unit') }}' == 'true') {
github.issues.removeLabel({owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number,
name: 'tested-unit'
})
}
- uses: actions/github-script@0.6.0
if: contains(env.isFork, 'false')
with:
github-token: ${{ secrets.TRILOM_BOT_TOKEN }}
script: |
github.issues.addLabels({owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number,
labels: ['tested-unit']
})
# integration test with jest
test-integration:
name: jest integration tests
runs-on: ubuntu-latest
needs: test-unit
steps:
- uses: actions/checkout@v2
- run: yarn build
- run: yarn test-integration
env:
GITHUB_TOKEN: ${{ secrets.TRILOM_BOT_TOKEN }}
- uses: actions/github-script@0.6.0
if: failure() && contains(env.isFork, 'false')
with:
github-token: ${{ secrets.TRILOM_BOT_TOKEN }}
script: |
if ('${{ contains(github.event.pull_request.labels.*.name, 'tested-integration') }}' == 'true') {
github.issues.removeLabel({owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number,
name: 'tested-integration'
})
}
- uses: actions/github-script@0.6.0
if: contains(env.isFork, 'false')
with:
github-token: ${{ secrets.TRILOM_BOT_TOKEN }}
script: |
github.issues.addLabels({owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number,
labels: ['tested-integration']
})
# lint code in github check
lintdog-fork:
name: eslintdog (reviewdog)
runs-on: ubuntu-latest
needs: build
if: github.event.pull_request.head.repo.full_name != github.repository
steps:
- uses: actions/checkout@v2
- run: yarn build
- name: Lint and report
uses: reviewdog/action-eslint@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
reporter: github-check
eslint_flags: '--ext .ts ./'
# lint code and comment back if possible
lintdog:
name: eslintdog (reviewdog)
runs-on: ubuntu-latest
needs: build
if: github.event.pull_request.head.repo.full_name == github.repository
steps:
- uses: actions/checkout@v2
- run: yarn build
- name: Lint and report
uses: reviewdog/action-eslint@v1
with:
github_token: ${{ secrets.TRILOM_BOT_TOKEN }}
reporter: github-pr-review
eslint_flags: '--ext .ts ./'
- uses: actions/github-script@0.6.0
if: failure()
with:
github-token: ${{ secrets.TRILOM_BOT_TOKEN }}
script: |
if ('${{ contains(github.event.pull_request.labels.*.name, 'lintdogged') }}' == 'true') {
github.issues.removeLabel({owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number,
name: 'lintdogged'
})
}
- uses: actions/github-script@0.6.0
with:
github-token: ${{ secrets.TRILOM_BOT_TOKEN }}
script: |
github.issues.addLabels({owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number,
labels: ['lintdogged']
})
# format and push code back if not forked branch
format_check_push:
name: prettier
runs-on: ubuntu-latest
needs: [lintdog, lintdog-fork]
if: always()
env:
GITHUB_TOKEN: ${{ secrets.TRILOM_BOT_TOKEN }}
steps:
- uses: actions/checkout@v2 # checkout for forks
if: contains(env.isFork, 'true')
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/checkout@v2 # checkout for PR
if: contains(env.isFork, 'false')
with:
token: ${{ secrets.TRILOM_BOT_TOKEN }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
ref: ${{ github.event.pull_request.head.ref }}
- run: yarn build
- run: yarn format-check
- name: yarn format and push code if check failed
if: failure() && github.actor != 'trilom-bot' && contains(env.isFork, 'false')
env:
GITHUB_TOKEN: ${{ secrets.TRILOM_BOT_TOKEN }}
run: |
yarn format
sudo yarn clean
git config --local user.email "trilom-bot@trailmix.me"
git config --local user.name "trilom-bot"
git add -A
git diff-index --quiet HEAD || git commit -m "Adding format changes 🤖" -a
git push https://x-access-token:${GITHUB_TOKEN}@github.com/${{ github.repository }}.git HEAD:refs/heads/${{ github.head_ref }} && exit 0
- uses: actions/github-script@0.6.0
if: failure() && contains(env.isFork, 'false')
with:
github-token: ${{ secrets.TRILOM_BOT_TOKEN }}
script: |
if ('${{ contains(github.event.pull_request.labels.*.name, 'pretty') }}' == 'true') {
github.issues.removeLabel({owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number,
name: 'pretty'
})
}
- uses: actions/github-script@0.6.0
if: contains(env.isFork, 'false')
with:
github-token: ${{ secrets.TRILOM_BOT_TOKEN }}
script: |
github.issues.addLabels({owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number,
labels: ['pretty']
})

View File

@ -0,0 +1,259 @@
# if a push comes in then this will test it for a release and create a release PR if needed
name: Push to release branches
on:
push:
branches: [master, next, alpha, beta]
tags-ignore: ['**']
jobs:
# semantic release an auto-merged branch to github package repo, npm, github actions
release:
name: Release to NPM, Github, Github Actions Marketplace
runs-on: ubuntu-latest
needs: [build, test-unit, test-integration, lintdog]
if: >
github.actor != 'semantic-release-bot'
&& ( (contains(github.event.head_commit.message, 'trilom/1.')
|| contains(github.event.head_commit.message, 'trilom/2.'))
&& ! contains(github.event.head_commit.message, 'chore(release):'))
env:
GITHUB_TOKEN: ${{ secrets.TRILOM_BOT_TOKEN }}
SEMANTIC_RELEASE_PACKAGE: '@${{ github.repository }}'
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: semantic-release
uses: cycjimmy/semantic-release-action@v2
id: semantic
with:
semantic_version: 15.14.0
extra_plugins: |
@semantic-release/git@7.0.18
@semantic-release/changelog
semantic-release-slack-bot
dry_run: false
- name: echo release outputs
if: steps.semantic.outputs.new_release_published == 'true'
run: |
echo ${{ steps.semantic.outputs.new_release_version }}
echo ${{ steps.semantic.outputs.new_release_major_version }}
echo ${{ steps.semantic.outputs.new_release_minor_version }}
echo ${{ steps.semantic.outputs.new_release_patch_version }}
- name: Setup Node.js with GitHub Package Registry
if: steps.semantic.outputs.new_release_published == 'true'
uses: actions/setup-node@v1
with:
node-version: 12
registry-url: 'https://npm.pkg.github.com'
scope: trilom
- name: Publish To GitHub Package Registry
if: steps.semantic.outputs.new_release_published == 'true'
run: npm publish
env:
NODE_AUTH_TOKEN: ${{ env.GITHUB_TOKEN }}
# create PR from release branch to master to prepare for release
check-release:
name: Check if we need to release
runs-on: ubuntu-latest
needs: [build, test-unit, test-integration, lintdog]
if: >
github.actor != 'semantic-release-bot'
&& ! contains(github.event.head_commit.message, 'trilom/1.')
&& ! contains(github.event.head_commit.message, 'trilom/2.')
&& ! contains(github.event.head_commit.message, 'chore(release):')
env:
GITHUB_TOKEN: ${{ secrets.TRILOM_BOT_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: commit format changes and create authors file
run: |
git config --local user.email "trilom-bot@trailmix.me"
git config --local user.name "trilom-bot"
yarn build
yarn format
git add -A
git diff-index --quiet HEAD || git commit -m "Adding format changes 🤖" -a
yarn build-release
git add -A
git diff-index --quiet HEAD || git commit -m "Adding release changes ⚙️" -a
git log --format='%aN <%aE>%n%cN <%cE>' | sort -u > AUTHORS
sed -i '/trilom-bot/d' AUTHORS
sed -i '/semantic-release-bot/d' AUTHORS
sed -i '/carnoco@gmail.com/d' AUTHORS
sed -i '/GitHub <noreply@github.com>/d' AUTHORS
sed -i '/dependabot/d' AUTHORS
echo -e "\r\n$(date)" >> AUTHORS
git add -A
git diff-index --quiet HEAD || git commit -m "Updating AUTHORS 📓" -a
# see if we need to release; if so, create an automerge release PR and notify the original creator
- name: semantic-release
uses: cycjimmy/semantic-release-action@v2
id: semantic
env:
SEMANTIC_RELEASE_PACKAGE: '@${{ github.repository }}'
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
with:
semantic_version: 15.14.0
extra_plugins: |
@semantic-release/git@7.0.18
@semantic-release/changelog
semantic-release-slack-bot
dry_run: true
- name: echo release outputs
if: steps.semantic.outputs.new_release_published == 'true'
run: |
echo ${{ steps.semantic.outputs.new_release_version }}
echo ${{ steps.semantic.outputs.new_release_major_version }}
echo ${{ steps.semantic.outputs.new_release_minor_version }}
echo ${{ steps.semantic.outputs.new_release_patch_version }}
- name: push potential formatting changes since there is no release
if: steps.semantic.outputs.new_release_published == 'false'
run: |
git config --local user.email "trilom-bot@trailmix.me"
git config --local user.name "trilom-bot"
git push -f https://x-access-token:${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git HEAD:${{ github.ref }}
- name: get changed files and format for automerge PR body
id: file_changes
uses: trilom/file-changes-action@master
if: steps.semantic.outputs.new_release_published == 'true'
with:
githubToken: ${{ env.GITHUB_TOKEN }}
output: '_<br />&nbsp;&nbsp;_'
- name: get original PR number
uses: actions/github-script@0.6.0
id: pr
if: steps.semantic.outputs.new_release_published == 'true'
with:
github-token: ${{env.GITHUB_TOKEN}}
result-encoding: string
script: |
const result = await github.repos.listPullRequestsAssociatedWithCommit({
owner: context.payload.repository.owner.name,
repo: context.payload.repository.name,
commit_sha: context.payload.head_commit.id
})
if (result.data.length >= 1) {
return result.data[0].number
} else return 87
- name: get original PR user
uses: actions/github-script@0.6.0
id: login
if: steps.pr.outputs.result != 0 && steps.semantic.outputs.new_release_published == 'true'
with:
github-token: ${{env.GITHUB_TOKEN}}
result-encoding: string
script: |
const result = await github.pulls.get({
owner: context.payload.repository.owner.name,
repo: context.payload.repository.name,
pull_number: ${{ steps.pr.outputs.result }}
})
if (result.data.user && result.data.user.login) {
return result.data.user.login
} else return 'trilom';
- name: create release PR
id: create-pr
uses: peter-evans/create-pull-request@v2
if: steps.semantic.outputs.new_release_published == 'true'
with:
token: ${{ env.GITHUB_TOKEN }}
commit-message: '${{ github.event.head_commit.message }}'
committer: trilom-bot <trilom-bot@trailmix.me>
author: ${{ steps.login.outputs.result }} <${{ steps.login.outputs.result }}@users.noreply.github.com>
title: 'releases/v${{ steps.semantic.outputs.new_release_version }} [@${{ steps.login.outputs.result }}] - ${{ github.event.head_commit.message }}'
body: |
# @${{ steps.login.outputs.result }} would like to merge into file-changes-action
[**compare link**](${{ github.event.compare }})
## Commits
```json
${{ toJSON(github.event.commits)}}
```
## Files
&nbsp;&nbsp;_${{ steps.file_changes.outputs.files}}_
## Files modified
&nbsp;&nbsp;_${{ steps.file_changes.outputs.files_modified}}_
## Files added
&nbsp;&nbsp;_${{ steps.file_changes.outputs.files_added}}_
## Files removed
&nbsp;&nbsp;_${{ steps.file_changes.outputs.files_removed}}_
labels: 'automated pr'
assignees: '${{ steps.login.outputs.result }},trilom'
reviewers: trilom
branch: '${{ steps.semantic.outputs.new_release_version }}'
- name: notify initial committer of change
uses: peter-evans/create-or-update-comment@v1
if: steps.login.outputs.result != '' && steps.semantic.outputs.new_release_published == 'true'
with:
token: ${{ env.GITHUB_TOKEN }}
issue-number: ${{ steps.pr.outputs.result }}
body: |
Hey @${{ steps.login.outputs.result }},
This merge has triggered a release, hurray!
[Here you can follow the release.](https://github.com/trilom/file-changes-action/pull/${{ steps.create-pr.outputs.pr_number }})
Please use this new **Pull Request** if there are any issues to communicate further.
Thanks!
# - uses: actions/github-script@0.6.0
# if: steps.create-pr.outputs.pr_number != '' && steps.semantic.outputs.new_release_published == 'true'
# with:
# github-token: ${{ secrets.TRILOM_BOT_TOKEN }}
# script: |
# github.issues.addLabels({owner: context.repo.owner, repo: context.repo.repo, issue_number: ${{ steps.create-pr.outputs.pr_number }},
# labels: ['${{ steps.semantic.outputs.new_release_version }}']
# })
# make sure we can build
build:
name: yarn install && tsc
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: yarn build
# unit test with jest
test-unit:
name: jest unit tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: yarn build
- run: yarn test-coverage
- run: bash <(curl -s https://codecov.io/bash)
# integration test with jest
test-integration:
name: jest integration tests
needs: test-unit
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: yarn build
- run: yarn test-integration
env:
GITHUB_TOKEN: ${{ secrets.TRILOM_BOT_TOKEN }}
# lint code and comment back if possible
lintdog:
name: eslintdog (reviewdog)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Lint and report push
uses: reviewdog/action-eslint@v1
with:
github_token: ${{ secrets.TRILOM_BOT_TOKEN }}
reporter: github-check
eslint_flags: 'src/**/*.ts'

View File

@ -0,0 +1,86 @@
# Workflow Information
- [Workflow Information](#workflow-information)
- [Overview](#overview)
- [Schedule](#schedule)
- [Issue Comment](#issue-comment)
- [Pull Request](#pull-request)
- [Push](#push)
# Overview
1. Make a **Pull Request** from your forked branch (forked from _master_) with changes to _trilom/file-changes-action/master_ branch.
2. Once merged into _master_, this will lint the code and surface the output in the checks, update the AUTHORS file, and package _dist/_. If a release is warranted, a **Pull Request** is created from the _v\*\*_ branch to _master_ and a comment is posted on the original **Pull Request** notifying contributors. If there is no release, the changes are **push**ed back to _master_.
3. In the **Pull Request**, linting and testing are performed again. If the _linted_, _tested-unit_, _tested-integration_, _builds_, and _lintdogged_ labels exist and _hold merge_ does not, the release will be merged into _master_.
4. Once merged this time, [semantic-release](https://github.com/semantic-release/semantic-release) will run to create the Github Release, release notes, and changelog, notify Slack, package and deploy to NPM and the Github Package Repo, label the release, and notify any related issues of its deployment.
5. After user semantic-release-bot commits the release commit, this code will be pushed to the release branch.
## Schedule
- Every day at 5:00 AM GMT:
- Run integration tests via Github Actions.
## Issue Comment
- When any `created` **Issue Comment** type runs on a **Pull Request** from trilom with the body of `/integrationNUMBER`(**integration.yml**):
- Run integration tests via Github Actions with PR.
- **NOT IMPLEMENTED** When any `created` **Issue Comment** type runs on a **Pull Request** from trilom with the body of `/release`(**automerge.yml**):
- If the _linted_, _tested-unit_, _tested-integration_, _builds_, and _lintdogged_ labels exist, and neither _hold merge_ nor _automated merge_ is present:
- Merge the PR and add the _automated merge_ label
- If failure, put some output on the original PR.
## Pull Request
- When any `opened`, `reopened`, or `synchronize` **Pull Request** type runs to the _master_ branch from a _v\*\*_ branch:
- Run integration tests via Github Actions.
- When any `opened` or `reopened` **Pull Request** type runs on any branch other than _master_ from anyone other than trilom or trilom-bot from a forked branch(**close_pr.yml**):
- Close the **Pull Request** and put the dunce cap on.
- When any `labeled`, or `closed` **Pull Request** type runs on _master_, _next_, _alpha_, or _beta_(**automerge.yml**):
- If the _linted_, _tested-unit_, _tested-integration_, _builds_, and _lintdogged_ labels exist, and neither _hold merge_ nor _automated merge_ is present:
- Merge the PR and add the _automated merge_ label
- If failure, put some output on the original PR.
- When any `opened`, `reopened`, or `synchronize` **Pull Request** type runs(**pr.yml**):
- Assign it to trilom (**add-reviews**)
- Build code with `yarn build` which runs `yarn` and `tsc` (**build**)
- Label with builds if passing and on inner workspace
- Test code with `yarn test-coverage` which runs `jest --coverage` (**test-unit**)
- Label with tested-unit if passing and on inner workspace
- Test code with `yarn test-integration` which runs `jest -c jest.config.integration.js` (**test-integration**)
- Label with tested-integration if passing and on inner workspace
- Test code with eslint reviewdog and report back if inner workspace (**lintdog**)
- Label with lintdogged if passing and on inner workspace
- Check format of code with `yarn format-check` which runs `prettier --check` (**format_check_push**)
- If:
- Fork then pull **Pull Request** github.ref with GITHUB_TOKEN
- Inner **Pull Request** then pull HEAD repo ref
- Build code with `yarn build` which runs `yarn` and `tsc`
- If format-check succeeds and on inner workspace
- Label with pretty
- If format-check fails and on inner workspace and actor is not trilom-bot
- Run `yarn format` which runs `prettier --write`
- Clean build files with `yarn clean`
- Commit the format changes as trilom-bot to **Pull Request** head
## Push
- When any **Push** type runs to _master_:
- Run integration tests via Github Actions.
- When any **Push** type runs to _master_, _next_, _alpha_, or _beta_(**push.yml**):
- Build code with `yarn build` which runs `yarn` and `tsc` (**build**)
- Test code with `yarn test-coverage` which runs `jest` (**test**)
- Test code with eslint reviewdog and report back with github checks(**lintdog**)
- When any **Push** type runs to _master_, _next_, _alpha_, or _beta_ with a head_commit message **NOT** containing 'trilom/v1.' or 'trilom/v2.':
- Build with `yarn build-release` which runs `yarn && tsc --build tsconfig.build.json && ncc build --minify` to build the **dist/\*\*.js** files, update **AUTHORS**, format **src/\*\*.ts** files and commit.
- Test [semantic-release](https://github.com/semantic-release/semantic-release) if a release is ready then create a **Pull Request**
- Echo release outputs
- Get changed files with [file-changes-action](https://github.com/trilom/file-changes-action) and build a message to post to new **Pull Request**
- Comment on the original **Pull Request** with the new details of the release.
- If no release, then **Push** changes directly back to master.
- When any **Push** type runs to _master_, _next_, _alpha_, or _beta_ with a head_commit message containing 'trilom/v1.' or 'trilom/v2.':
- Run [semantic-release](https://github.com/semantic-release/semantic-release) to prepare the Github Release, release notes, and changelog, notify Slack, package and deploy to NPM and the Github Package Repo, label the release, and notify any issues of its deployment.
- When any **Push** type runs to _master_, _next_, _alpha_, or _beta_ from semantic-release-bot with a head_commit message containing 'chore(release):':
- Get the **Pull Request** number from the **Push** and push the semantic-release changes to the tagged release branch.

View File

@ -0,0 +1,100 @@
lib
**/outputs/**
# Dependency directory
node_modules
# Rest pulled from https://github.com/github/gitignore/blob/master/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
jspm_packages/
# # TypeScript v1 declaration files
# typings/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
# parcel-bundler cache (https://parceljs.org/)
.cache
# next.js build output
.next
# nuxt.js build output
.nuxt
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# OS metadata
.DS_Store
Thumbs.db
# Ignore built ts files
__tests__/runner/*

View File

@ -0,0 +1,2 @@
/dist
/node_modules

View File

@ -0,0 +1,9 @@
printWidth: 80
tabWidth: 2
useTabs: false
semi: false
singleQuote: true
trailingComma: none
bracketSpacing: false
arrowParens: avoid
parser: typescript

View File

@ -0,0 +1,62 @@
branches:
- "+([1-9])?(.{+([1-9]),x}).x"
- master
- next
- name: alpha
prerelease: true
- name: beta
prerelease: true
dryRun: false
plugins:
- "@semantic-release/commit-analyzer"
- "@semantic-release/release-notes-generator"
-
- semantic-release-slack-bot
- notifyOnSuccess: true
notifyOnFail: true
markdownReleaseNotes: true
onSuccessTemplate:
text: "$package_name version v$npm_package_version!\n\n$release_notes"
-
- "@semantic-release/changelog"
- changelogFile: CHANGELOG.md
- "@semantic-release/npm"
-
- "@semantic-release/github"
- assets:
- path: "dist/**/*.js"
label: Packaged JS Code
successComment: >
# 🎉🦍🎉 This <% issue.pull_request ? 'pull request' : 'issue' %>
has been resolved in version *<%= nextRelease.version %>* at
*trilom/file-changes-action@<%= nextRelease.gitTag %>*
` - name: File Changes Action
uses: trilom/file-changes-action@<%= nextRelease.gitTag %>`
## Release<%= _.size(releases) > 1 ? 's' : '' %>
<% _.forEach(releases, function(release) { %>
**Release Name:** [<%= release.name %>](<%= release.url %>)<% }); %>
## Commits
<% _.forEach(commits, function(commit) { %>
@<%= commit.author.name %> - [_<%= commit.message.toString().replace(/[()\\\/_\*]/g, '') %>_](https://github.com/trilom/file-changes-action/commit/<%= commit.hash %>)<% }); %>
labels: [failure]
releasedLabels: ["releases/${nextRelease.gitTag}"]
assignees: trilom
-
- "@semantic-release/git"
- assets: [CHANGELOG.md, package.json, yarn.lock]
message: >
chore(release): 🎉🦍🎉 Release <%= nextRelease.version %> -
<%= new Date().toLocaleDateString('en-US', {year: 'numeric', month: 'short', day: 'numeric', hour: 'numeric', minute: 'numeric' }) %> [skip ci]
`- name: File Changes Action
uses: trilom/file-changes-action@<%= nextRelease.gitTag %>`
<%= nextRelease.notes %>

View File

@ -0,0 +1,5 @@
Bryan Killian <bryan.v.killian@gmail.com>
Daniel Orner <daniel.orner@wishabi.com>
Sergey Kluchkovsky <kaineer@gmail.com>
Thu May 21 14:42:36 UTC 2020

View File

@ -0,0 +1,45 @@
## [1.2.4](https://github.com/trilom/file-changes-action/compare/v1.2.3...v1.2.4) (2020-05-21)
### Bug Fixes
* **change in api:** github api had a change, this should trigger release 1.2.4. this change here quiets a quacker during the intergration test ([99f8f91](https://github.com/trilom/file-changes-action/commit/99f8f91f3ed1430713973d8f1e2848b5acc58163))
## [1.2.3](https://github.com/trilom/file-changes-action/compare/v1.2.2...v1.2.3) (2020-03-25)
### Bug Fixes
* **test release:** testing a release ([dfca448](https://github.com/trilom/file-changes-action/commit/dfca448d9d1f04825a549ba0bc7d6b097df295a2))
## [1.2.2](https://github.com/trilom/file-changes-action/compare/v1.2.1...v1.2.2) (2020-03-25)
### Bug Fixes
* **issue_comment:** this needs to return PR info not commit info if before and after explicitly set, else PR ([eee976b](https://github.com/trilom/file-changes-action/commit/eee976b2219f243f83583baab84fa89376006acc))
* **naming:** renamed "deleted" to "removed". sorry if this is breaking for you. ([800537f](https://github.com/trilom/file-changes-action/commit/800537f435a66454c64fc2b42cfd82ca33cc093d))
* **pull_request_synchronize events:** issue with PR Synchronize events, it would return commit files instead of PR files, this is adjusted to return ALL PR files with PR synchronize event ([fb7bcc7](https://github.com/trilom/file-changes-action/commit/fb7bcc76581402f20aa64da82cd1174e313ec02c))
* **space issue:** this should resolve the issue with using a blank space. the assumption here is that 'json' is default, if you use ' ' it will be '' which is the app default, not the action default of 'json' ([0e4184f](https://github.com/trilom/file-changes-action/commit/0e4184fe04f87323c60b71c1ccf2af95f9f35b8c)), closes [#81](https://github.com/trilom/file-changes-action/issues/81)
## [1.2.1](https://github.com/trilom/file-changes-action/compare/v1.2.0...v1.2.1) (2020-03-19)
### Bug Fixes
* **everything:** very proud to say this is 100% coverage according to default jest of all src code (including test) ([dd31d02](https://github.com/trilom/file-changes-action/commit/dd31d0220fdc9e6eb3469b3443239359d7da33d4))
* **redesign:** a lot of things changed here in the project ([32903fd](https://github.com/trilom/file-changes-action/commit/32903fd341ce6a5471e3df73393784cb43adb397))
# [1.2.0](https://github.com/trilom/file-changes-action/compare/v1.1.0...v1.2.0) (2020-03-02)
### Features
* **action:** githubToken is optional (uses action token), added githubRepo, prNumber, and pushBefore & After ([b24e2c3](https://github.com/trilom/file-changes-action/commit/b24e2c30c72710da8704a02f9d05141a19f27f83))
# [1.2.0](https://github.com/trilom/file-changes-action/compare/v1.1.0...v1.2.0) (2020-03-02)
### Features
* **action:** githubToken is optional (uses action token), added githubRepo, prNumber, and pushBefore & After ([b24e2c3](https://github.com/trilom/file-changes-action/commit/b24e2c30c72710da8704a02f9d05141a19f27f83))

View File

@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2018 GitHub, Inc. and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -0,0 +1,201 @@
# file-changes-action
[![codecov](https://codecov.io/gh/trilom/file-changes-action/branch/master/graph/badge.svg)](https://codecov.io/gh/trilom/file-changes-action)
[![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg?style=flat-square)](https://github.com/prettier/prettier)
![Integration Tests](https://github.com/trilom/file-changes-action/workflows/Integration%20Tests/badge.svg)
# Like my work? Hire me!
> Please reach out if you need something built!
This action will take the information from the Push/Pull Request and output some variables and write files that will let you know what was changed, removed, or added.
## Inputs
### githubRepo
_Optional_ - `string` - the github repository you want to compare changes from, defaults to the github.repository.
### githubToken
_Optional_ - `string` - specific github token, github.token is used by default (Github Action Runner)
### output
_Optional_ - `string` - the format of the output variables; the default is json. Use ',' for comma-separated values or ' ' for space-delimited values. You can also supply your own delimiter: for example, ' |FILE:' will output 'file1.yml |FILE:file2.yml |FILE:file3.yml'.
### fileOutput
_Optional_ - `string` - the format of the file output; the default is json. Use ',' for comma-separated values or ' ' for space-delimited values. You can also supply your own delimiter: for example, `\ |FILE:` will output:
> file1.yml |FILE:file2.yml |FILE:file3.yml
If you select json the file extension will be .json, if you select ',' it will be .csv, and anything else will be written as .txt (a custom-delimiter example appears at the end of _More examples_ below).
### pushBefore
_Optional_ - `string` - pass in a specific sha to compare to as a before, required if using pushAfter. (push payload after github.payload.before)
### pushAfter
_Optional_ - `string` - pass in a specific sha to compare to as an after, required if using pushBefore. (push payload after github.payload.after)
### prNumber
_Optional_ - `string` - pass in a specific PR number to get file changes from.
## Outputs
### files
steps.file_changes.outputs.files - `string` - The names of all new, updated, and removed files. The output format depends on the output input; the default is a JSON string.
### files_added
steps.file_changes.outputs.files_added - `string` - The names of the newly created files. The output format depends on the output input; the default is a JSON string.
### files_modified
steps.file_changes.outputs.files_modified - `string` - The names of the updated files. The output format depends on the output input; the default is a JSON string.
### files_removed
steps.file_changes.outputs.files_removed - `string` - The names of the removed files. The output format depends on the output input; the default is a JSON string.
## Example usage
```yaml
# bare minimal
name: changes
on: push
jobs:
changes:
runs-on: ubuntu-latest
steps:
- id: file_changes
uses: trilom/file-changes-action@v1.2.3
### full
name: changes
on: [push, pull_request] # push or pull, or any event with custom pr number or before/after commit sha
jobs:
changes:
runs-on: ubuntu-latest
steps:
- id: file_changes
uses: trilom/file-changes-action@v1.2.3
with:
# optional target repo
githubRepo: trilom/file-changes-action
# optional token
githubToken: ${{ secrets.BOT_TOKEN }}
# optional output format
output: 'json'
# optional fileoutput format
fileOutput: 'csv'
# optional push before SHA (need both before and after)
pushBefore: 79eeec74aebc3deb0a2f6234c5ac13142e9224e5
# optional push after SHA (need both before and after)
pushAfter: 1c5a2bfde79e2c9cffb75b9a455391350fe69a40
# optional PR number to compare
prNumber: 36
```
## How to Use
In order to make those decisions we need to know what files have changed, and that is where this action comes in. In the example below we check out our repository code and then run the `trilom/file-changes-action@v1` action. The only thing you need to provide is a GITHUB_TOKEN so that Octokit can make its API calls.
If a PR is made then it will look at all of the files included in the PR.
If a push is made then it will compare commits from the SHA `github.payload.before` to the SHA `github.payload.after` of the push.
After gathering this information it will output the files in 2 ways.
- As output variables: you can reference them with `steps.file_changes.outputs.files`, `steps.file_changes.outputs.files_modified`, `steps.file_changes.outputs.files_added`, and `steps.file_changes.outputs.files_removed`.
- As a file on the container stored at `$HOME/files.json`, `$HOME/files_modified.json`, `$HOME/files_added.json`, `$HOME/files_removed.json`.
- _NOTE:_ If you set a custom delimiter in the output or fileOutput inputs then you will receive different files. For example, a delimiter of ',' will output at `$HOME/files.csv` instead of `$HOME/files.json`. Likewise, anything other than the 'json' or ',' delimiters will output `$HOME/files.txt` files instead of `$HOME/files.json`.
## Use Cases
I have a process where AWS Cloudformation templates are stored in one directory, named something like PRODUCT-ROLE, with mappings for these templates that span the PRODUCT. For example **mappings/wordpress.mappings.yml, templates/wordpress-database.yml, templates/wordpress-webserver.yml**, and some of the templates might use Lambda functions defined in, for example, **functions/wordpress-webserver/**.
In the example below we have a workflow that runs on *push* to the develop branch and performs some actions based on the changed files. In my use case I look for changes on the develop branch of this repository for every push that happens. When a push happens and a change is made to any of the paths below, the workflow triggers. With this action you know exactly which files changed, so you can make decisions later in your CI/CD.
In this case, if a **templates/*.yml** file is changed, then we want to update the Cloudformation stack. We can also write specifics for related templates. For example, if **templates/wordpress-database.yml** changes then we also want to deploy **templates/wordpress-webserver.yml** afterwards.
Another case: if **mappings/wordpress.mappings.yml** changes, we want to deploy all **templates/wordpress-*.yml** files.
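Below is a minimal sketch of what such a gate could look like, reading the `$HOME/files.json` file the action writes; the `jq` filter, the `deploy-templates` job name, the path prefixes, and the `aws cloudformation deploy` step are illustrative assumptions, not part of this action.
```yaml
name: push-develop
on:
  push:
    branches: [develop]
jobs:
  deploy-templates:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - id: file_changes
        uses: trilom/file-changes-action@v1.2.3
      - name: deploy wordpress stack if templates or mappings changed
        # the jq filter, path prefixes, and aws command below are illustrative assumptions
        run: |
          if jq -e 'map(select(startswith("templates/wordpress") or startswith("mappings/wordpress"))) | length > 0' "$HOME/files.json" > /dev/null; then
            aws cloudformation deploy \
              --stack-name wordpress-webserver \
              --template-file templates/wordpress-webserver.yml
          fi
```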
## More examples
```yaml
name: push-develop
on: [push]
jobs:
changes:
runs-on: ubuntu-latest
steps:
- id: file_changes
uses: trilom/file-changes-action@v1.2.3
- name: test
run: |
cat $HOME/files.json
cat $HOME/files_modified.json
cat $HOME/files_added.json
cat $HOME/files_removed.json
echo '${{ steps.file_changes.outputs.files}}'
echo '${{ steps.file_changes.outputs.files_modified}}'
echo '${{ steps.file_changes.outputs.files_added}}'
echo '${{ steps.file_changes.outputs.files_removed}}'
```
You can set the output and fileOutput to ',' for csv output.
```yaml
name: push-develop
on: [push]
jobs:
build:
runs-on: ubuntu-latest
steps:
- id: file_changes
uses: trilom/file-changes-action@v1.2.3
with:
output: ','
fileOutput: ','
- name: test
run: |
cat $HOME/files.csv
```
You can set the output and fileOutput to ' ' for txt output. In this example we also use a specific token and look up the PR that this push came from.
```yaml
name: push-develop
on: [push]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/github-script@0.6.0
id: pr
with:
github-token: ${{env.BOT_USER_TOKEN}}
result-encoding: string
script: |
const result = await github.repos.listPullRequestsAssociatedWithCommit({
owner: context.payload.repository.owner.name,
repo: context.payload.repository.name,
commit_sha: context.payload.head_commit.id
})
return result.data[0].number;
- id: file_changes
uses: trilom/file-changes-action@v1.2.3
with:
githubToken: ${{ env.BOT_USER_TOKEN }}
prNumber: ${{ steps.pr.outputs.result }}
output: ' '
fileOutput: ' '
- name: test
run: |
cat $HOME/files.txt
```
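The input descriptions above also mention custom delimiters, which none of the examples demonstrate. Here is a minimal sketch using the illustrative ` |FILE:` delimiter from those descriptions; any delimiter other than 'json' or ',' writes the file output as .txt.
```yaml
name: push-develop
on: [push]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - id: file_changes
        uses: trilom/file-changes-action@v1.2.3
        with:
          # any delimiter other than 'json' or ',' lands the file output in $HOME/files.txt
          output: ' |FILE:'
          fileOutput: ' |FILE:'
      - name: test
        run: |
          cat $HOME/files.txt
          echo '${{ steps.file_changes.outputs.files }}'
```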

View File

@ -0,0 +1,43 @@
name: 'File Changes Action'
description: 'Creates outputs variables of files modified, added, or removed by a PR or Push.'
author: 'Bryan Killian <me@trilom.org>'
inputs:
githubRepo:
description: 'The github repository you want to compare changes from, defaults to the github.repository.'
required: false
githubToken:
description: 'The github action token will be used by default; if you want to use something different, you can pass it in here.'
default: ${{ github.token }}
required: true
pushBefore:
description: 'Pass in a specific sha to compare to as a before, required if using pushAfter. (push BASE payload after github.payload.before)'
required: false
pushAfter:
description: 'Pass in a specific sha to compare to as an after, required if using pushBefore. (push HEAD payload after github.payload.after)'
required: false
prNumber:
description: 'Pass in a specific PR number to get file changes from.'
required: false
output:
description: 'Choose between json (default), or custom delimiter by passing a string, for example '','' for csv variable output'
required: true
default: json
fileOutput:
description: 'Choose between json (default), or custom delimiter by passing a string, for example '','' for csv file output. If you set as json the file output will be suffixed with .json, if you select '','' then the output will be .csv, else .txt will be the output.'
required: true
default: json
outputs:
files:
description: 'The names all new, updated, and removed files'
files_added:
description: 'The names of the newly created files'
files_modified:
description: 'The names of the updated files'
files_removed:
description: 'The names of the removed files'
runs:
using: 'node12'
main: 'dist/index.js'
branding:
icon: 'file-text'
color: 'red'
File diff suppressed because one or more lines are too long

View File
@ -0,0 +1,5 @@
var config = require('./jest.config')
config.testPathIgnorePatterns = ['!/src/tests/integration.test.ts']
config.testMatch = ['**/integration.test.ts']
console.log('RUNNING INTEGRATION TESTS')
module.exports = config
View File
@ -0,0 +1,29 @@
module.exports = {
preset: 'ts-jest',
testEnvironment: "node",
testRunner: 'jest-circus/runner',
testMatch: ['**/*.test.ts'],
testPathIgnorePatterns: ['/src/tests/integration.test.ts'],
clearMocks: true,
collectCoverage: false,
coverageThreshold: {
global: {
branches: 50,
functions: 70,
lines: 75,
statements: 75
},
'./src/*.ts': {
branches: 70,
functions: 85,
lines: 85,
statements: 85
},
'./src/tests/**/*.ts': {
branches: 50,
functions: 60,
lines: 65,
statements: 65
}
}
}
7238
.github/actions/file-changes-action/package-lock.json generated vendored Normal file

File diff suppressed because it is too large

View File
@ -0,0 +1,67 @@
{
"name": "@trilom/file-changes-action",
"version": "1.2.4",
"engines": {
"node": "12.16.3"
},
"description": "Creates outputs variables of files modified, added, or removed by a PR or Push.",
"main": "lib/main.js",
"scripts": {
"build": "yarn && tsc",
"build-package": "yarn build --build tsconfig.build.json && ncc build",
"build-release": "yarn build-package --minify",
"test": "jest",
"test-coverage": "jest --coverage",
"test-integration": "jest -c jest.config.integration.js && rm -rf src/tests/outputs",
"format": "prettier --write '**/*.ts'",
"format-check": "prettier --check '**/*.ts'",
"lint": "eslint --ext .ts ./",
"clean": "rm -rf node_modules lib"
},
"repository": {
"type": "git",
"url": "git+https://github.com/trilom/file-changes-action.git"
},
"keywords": [
"actions",
"action",
"github-action",
"github-actions",
"github",
"node",
"typescript",
"examples"
],
"author": "Bryan Killian <me@trilom.org>",
"homepage": "https://github.com/trilom/file-changes-action#README",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.2.3",
"@actions/github": "^2.1.1"
},
"devDependencies": {
"@octokit/types": "^2.5.0",
"@types/jest": "^24.9.1",
"@typescript-eslint/eslint-plugin": "^2.24.0",
"@typescript-eslint/parser": "^2.24.0",
"@zeit/ncc": "^0.20.5",
"codecov": "^3.6.5",
"eslint": "^6.8.0",
"eslint-config-airbnb-typescript": "^7.0.0",
"eslint-config-prettier": "^6.10.0",
"eslint-import-resolver-typescript": "^2.0.0",
"eslint-plugin-eslint-comments": "^3.1.2",
"eslint-plugin-import": "^2.20.1",
"eslint-plugin-jest": "^23.8.2",
"eslint-plugin-jsx-a11y": "^6.2.3",
"eslint-plugin-promise": "^4.2.1",
"eslint-plugin-react": "^7.19.0",
"eslint-plugin-react-hooks": "^2.5.0",
"eslint-plugin-unicorn": "^17.2.0",
"jest": "^24.9.0",
"jest-circus": "^24.9.0",
"prettier": "^1.19.1",
"ts-jest": "^24.3.0",
"typescript": "^3.8.3"
}
}
View File
@ -0,0 +1,150 @@
import {setOutput as coreSetOutput, debug as coreDebug} from '@actions/core'
import {writeFileSync} from 'fs'
import {ChangedFiles} from 'typings/ChangedFiles'
import {GitHubFile} from 'typings/GitHubFile'
import {getErrorString} from './UtilsHelper'
/**
* @function sortChangedFiles
* @param files pass in array of GithubFile's to be sorted
* @returns ChangedFiles object that has .files, .added, .modified, .renamed, and .removed
*/
export function sortChangedFiles(files: GitHubFile[]): ChangedFiles {
try {
coreDebug(
`Here are the files I am changing: ${JSON.stringify(files, null, 2)}`
)
const changedFiles = {
files: [],
added: [],
removed: [],
renamed: [],
modified: []
} as ChangedFiles
files.forEach(f => {
changedFiles[f.status].push(
f.filename || f.added || f.removed || f.renamed || f.modified
)
changedFiles.files.push(
f.filename || f.added || f.removed || f.modified || f.renamed
)
})
return changedFiles
} catch (error) {
const eString = `There was an issue sorting files changed.`
throw new Error(
getErrorString(
error.name,
error.status,
sortChangedFiles.name,
eString,
JSON.stringify(error)
)
)
}
}
/**
* @function getFormatExt
* @param format output format 'json' = '.json' ',' = '.csv' anything else is '.txt'.
* @returns file extension, '.json', '.csv', or '.txt'
*/
export function getFormatExt(format: string): string {
let ext
switch (format.trim()) {
case 'json':
ext = '.json'
break
case ',':
ext = '.csv'
break
default:
ext = '.txt'
break
}
return ext
}
/**
* @function formatChangedFiles
* @param format output format 'json' will stringify anything else will files.join('string')
* @param files string list of files to format
* @returns string for output of changedFiles
*/
export function formatChangedFiles(format: string, files: string[]): string {
if (format === 'json') {
return JSON.stringify(files)
}
return files.join(format)
}
/**
* @function writeFiles
* @param format output format 'json' will stringify anything else will files.join('string')
* @param key changedFiles type added, modified, removed, or files
* @param files string list of files to format
* @returns string output to be stored in file
*/
export function writeFiles(format: string, key: string, files: string[]): void {
try {
const ext = getFormatExt(format)
const fileName = key === 'files' ? `${key}${ext}` : `files_${key}${ext}`
coreDebug(
`Writing output file ${
process.env.HOME
}/${fileName} with ${format} and files ${JSON.stringify(files, null, 2)}`
)
writeFileSync(
`${process.env.HOME}/${fileName}`,
formatChangedFiles(format, files),
'utf-8'
)
} catch (error) {
const eString = `There was an issue writing output files.`
throw new Error(
getErrorString(
error.name,
error.status,
writeFiles.name,
eString,
JSON.stringify(error)
)
)
}
}
/**
* @function writeOutput
* @param format output format 'json' will stringify anything else will files.join('string')
* @param key changedFiles type added, modified, removed, or files
* @param files string list of files to format
* @returns string output to be stored to action output
*/
export function writeOutput(
format: string,
key: string,
files: string[]
): void {
try {
const fileName = key === 'files' ? key : `files_${key}`
coreDebug(
`Writing output ${fileName} with ${format} and files ${JSON.stringify(
files,
null,
2
)}`
)
coreSetOutput(fileName, formatChangedFiles(format, files))
} catch (error) {
const eString = `There was an issue setting action outputs.`
throw new Error(
getErrorString(
error.name,
error.status,
writeOutput.name,
eString,
JSON.stringify(error)
)
)
}
}
View File
@ -0,0 +1,176 @@
import {GitHub} from '@actions/github'
import {GitHubFile} from 'typings/GitHubFile'
import {Inferred} from 'typings/Inferred'
import {getErrorString} from './UtilsHelper'
/**
* @function initClient
* @throws {Error} not sure what might trigger this, but it will throw an error.
* @param token github token to add to client
* @returns authenticated github client
*/
export function initClient(token: string): GitHub {
try {
return new GitHub(token)
} catch (error) {
const eString = `There was an error creating github client. Please check your token.`
throw new Error(
getErrorString(error.name, error.status, initClient.name, eString, error)
)
}
}
/**
* @function getChangedPRFiles
* @throws {Error} when a 404 or other is received. 404 can be bad repo, owner, pr, or unauthenticated
* @param client authenticated github client (possibly un-authenticated if public)
* @param repo repo string. file-changes-action
* @param owner owner string. trilom
* @param pullNumber pr number to get changed files for
* @returns Promise of array of changed files
*/
export async function getChangedPRFiles(
client: GitHub,
repo: string,
owner: string,
pullNumber: number
): Promise<GitHubFile[]> {
try {
const options = client.pulls.listFiles.endpoint.merge({
owner,
repo,
pull_number: pullNumber
})
const files: GitHubFile[] = await client.paginate(
options,
response => response.data
)
return files
} catch (error) {
const eString = `There was an error getting change files for repo:${repo} owner:${owner} pr:${pullNumber}`
let ePayload: string
if (error.name === 'HttpError' && +error.status === 404)
ePayload = getErrorString(
error.name,
error.status,
getChangedPRFiles.name,
eString,
error
)
else
ePayload = getErrorString(
`Unknown Error:${error.name || ''}`,
error.status,
getChangedPRFiles.name,
eString,
error.message
)
throw new Error(ePayload)
}
}
/**
* @function getChangedPushFiles
* @throws {Error} when a 404 or other is received. 404 can be bad repo, owner, sha, or unauthenticated
* @param client authenticated github client (possibly un-authenticated if public)
* @param repo repo string. file-changes-action
* @param owner owner string. trilom
* @param base BASE commit sha to compare
* @param head HEAD commit sha to compare
* @returns Promise of array of changed files
*/
export async function getChangedPushFiles(
client: GitHub,
repo: string,
owner: string,
base: string,
head: string
): Promise<GitHubFile[]> {
try {
const options = client.repos.compareCommits.endpoint.merge({
owner,
repo,
base,
head
})
const files: GitHubFile[] = await client.paginate(
options,
response => response.data.files
)
return files
} catch (error) {
const eString = `There was an error getting change files for repo:${repo} owner:${owner} base:${base} head:${head}`
let ePayload: string
if (error.name === 'HttpError' && +error.status === 404)
ePayload = getErrorString(
error.name,
error.status,
getChangedPushFiles.name,
eString,
error
)
else
ePayload = getErrorString(
`Unknown Error:${error.name || ''}`,
error.status,
getChangedPushFiles.name,
eString,
error.message
)
throw new Error(ePayload)
}
}
/**
* @function getChangedFiles
* @param client client authenticated github client (possibly un-authenticated if public)
* @param repoFull repo owner/repo string. trilom/file-changes-action
 * @type {Inferred} pass in inferred type from inferInput
* @returns Promise of an array of changed PR or push files
*/
export async function getChangedFiles(
client: GitHub,
repoFull: string,
{before, after, pr = NaN}: Inferred
): Promise<GitHubFile[]> {
try {
if (repoFull.split('/').length > 2) {
throw new Error(
getErrorString(
`Bad-Repo`,
500,
'self',
`Repo input of ${repoFull} has more than 2 length after splitting.`
)
)
}
const owner = repoFull.split('/')[0]
const repo = repoFull.split('/')[1]
let files: GitHubFile[] = []
if (Number.isNaN(pr))
files = await getChangedPushFiles(
client,
repo,
owner,
before || '',
after || ''
)
else files = await getChangedPRFiles(client, repo, owner, pr)
return files
} catch (error) {
const pError = JSON.parse(error.message)
if (pError.from.includes('getChanged'))
throw new Error(
JSON.stringify(
{...pError, ...{from: `${error.status}/${error.name}`}},
null,
2
)
)
const eString = `There was an error getting change files outputs pr: ${pr} before: ${before} after: ${after}`
const ePayload: string = getErrorString(
`Unknown Error:${error.name}`,
error.status,
getChangedFiles.name,
eString,
error.message
)
throw new Error(ePayload)
}
}
View File
@ -0,0 +1,120 @@
import {warning as coreWarning, getInput as coreGetInput} from '@actions/core'
import {context} from '@actions/github'
import {Inferred} from 'typings/Inferred'
import {Inputs} from 'typings/Inputs'
import {getErrorString} from './UtilsHelper'
/**
* @function getInputs
* @description reads the inputs to the action with core.getInput and returns object
* @returns {Inputs} object of inputs for the github action
*/
export function getInputs(): Inputs {
try {
const githubToken =
coreGetInput('githubToken') || process.env.GITHUB_TOKEN || false
if (!githubToken)
throw new Error(
getErrorString(
'getInputs Error',
500,
getInputs.name,
'Received no token, a token is a requirement.'
)
)
let prNumber
if (typeof context.issue.number !== 'undefined') {
if (
+coreGetInput('prNumber') !== context.issue.number &&
coreGetInput('prNumber')
) {
prNumber = +coreGetInput('prNumber')
} else {
prNumber = context.issue.number
}
} else {
prNumber = +coreGetInput('prNumber') || NaN
}
return {
githubRepo:
coreGetInput('githubRepo') ||
`${context.repo.owner}/${context.repo.repo}`,
githubToken,
pushBefore:
coreGetInput('pushBefore') ||
(context.payload.before === undefined ? false : context.payload.before),
pushAfter:
coreGetInput('pushAfter') ||
(context.payload.after === undefined ? false : context.payload.after),
prNumber,
output: coreGetInput('output') || ' ',
fileOutput: coreGetInput('fileOutput') || ' ',
event: context.eventName
} as Inputs
} catch (error) {
const eString = `Received an issue getting action inputs.`
const retVars = Object.fromEntries(
Object.entries(process.env).filter(
key =>
key[0].includes('GITHUB') ||
key[0].includes('INPUT_') ||
key[0] === 'HOME'
)
)
throw new Error(
getErrorString('getInputs Error', 500, getInputs.name, eString, retVars)
)
}
}
/**
* @function inferInput
* @param before BASE commit sha to compare
* @param after HEAD commit sha to compare
* @param pr pr number to get changed files for
* @returns {Inferred} object of inferred input for the action
*/
export function inferInput(
before: string,
after: string,
pr: number
): Inferred {
const event = context.eventName
const weirdInput = `Received event from ${event}, but also received a before(${before}) or after(${after}) value.\n I am assuming you want to use a Push event but forgot something, so I'm giving you a message.`
const allInput = `Received event from ${event}, but received a before(${before}), after(${after}), and PR(${pr}).\n I am assuming you want to use one or the other but I am giving you Push.`
if (event === 'pull_request') {
if (
before &&
after &&
(before !== context.payload.before || after !== context.payload.after)
)
return {before, after} // PR(push) - pull_request event with push inputs | PUSH
if (before || after) coreWarning(weirdInput) // PR(push) - pull_request event with single push input | PR*
return {pr} // PR - pull_request event with no push inputs | PR
}
if (event === 'push') {
if (pr) return {pr} // Push(PR) - push event with pr inputs | PR
return {before, after} // Push - push event with no pr inputs | PUSH
}
if (pr) {
if (before && after) {
coreWarning(allInput) // Not PR or Push - all inputs | PUSH*
if (event === 'issue_comment') return {before, after} // If you explicitly set a before/after in an issue comment it will return those
return {pr} // Not PR or Push - pr inputs | PR if a PR before and after assume its a synchronize and return the whole PR
}
if (before || after) coreWarning(weirdInput) // Not PR or Push - pull_request event with single push input | PR*
return {pr} // Not PR or Push - pr inputs | PR
}
if (before || after) {
if (!(before && after)) {
const eString = `Received event from ${event}, but only received a before(${before}) or after(${after}).\n I need both of these if you want to use a Push event.`
throw new Error(
getErrorString('inferInput Error', 500, inferInput.name, eString)
)
}
return {before, after} // Not PR or Push - push inputs | PUSH
}
const eString = `Received event from ${event}, but received no inputs. {event_name:${event}, pr: ${+pr}, before:${before}, after:${after}}`
throw new Error(
getErrorString('inferInput Error', 500, inferInput.name, eString)
)
}
View File
@ -0,0 +1,59 @@
import {setFailed} from '@actions/core'
import {ActionError} from 'typings/ActionError'
/**
* @function getErrorString
* @param name name of error
* @param status status code of error
* @param from name of function that error is thrown from
* @param message error message
* @param error error object to stringify and attach
*/
export function getErrorString(
name: string,
status = 500,
from: string,
message: string,
error: any = ''
): string {
try {
const test = JSON.stringify(
{
error: `${status}/${name}`,
from,
message,
payload: error
} as ActionError,
null,
2
)
return test
} catch (error_) {
setFailed(`Error throwing error.\n ${JSON.stringify(error_.message)}`)
throw new Error(
JSON.stringify({name: '500/undefined', message: 'Error throwing error.'})
)
}
}
/**
* @function errorMessage
* @param f name of function
* @param e error object
* @returns error message for function
*/
export function errorMessage(f: string, e: Error): string {
const error = JSON.stringify(e, null, 2)
let ret
if (f.includes('getInputs')) ret = `There was an getting action inputs.`
if (f.includes('inferInput'))
ret = `There was an issue inferring inputs to the action.`
if (f.includes('initClient'))
ret = `There was an issue initilizing the github client.`
if (f.includes('getChangedFiles'))
ret = `There was an issue getting changed files from Github.`
if (f.includes('sortChangedFiles'))
ret = `There was an issue sorting changed files from Github.`
if (f.includes('writeFiles')) ret = `There was an issue writing output files.`
if (f.includes('writeOutput'))
ret = `There was an issue writing output variables.`
return `${ret}\nException: ${error}`
}
View File
@ -0,0 +1,40 @@
import {setFailed as coreSetFailed} from '@actions/core'
import {getInputs, inferInput} from './InputHelper'
import {writeOutput, writeFiles, sortChangedFiles} from './FilesHelper'
import {getChangedFiles, initClient} from './GithubHelper'
import {errorMessage} from './UtilsHelper'
// figure out if it is a PR or Push
export async function run(): Promise<void> {
try {
// get inputs
const inputs = getInputs()
// parse input
const inferred = inferInput(
inputs.pushBefore,
inputs.pushAfter,
inputs.prNumber
)
// prepare client
const client = initClient(inputs.githubToken)
// get changed files
const changedFilesArray = await getChangedFiles(
client,
inputs.githubRepo,
inferred
)
// sort changed files
const changedFiles = sortChangedFiles(changedFilesArray)
Object.keys(changedFiles).forEach(key => {
// write file output
writeFiles(inputs.fileOutput, key, changedFiles[key])
// write output vars
writeOutput(inputs.output, key, changedFiles[key])
})
} catch (error) {
const pError = JSON.parse(error.message)
coreSetFailed(errorMessage(pError.from, pError))
throw new Error(JSON.stringify(pError))
}
}
/* istanbul ignore next */
if (!(process.env.INPUT_MOCK === 'true')) run()
View File
@ -0,0 +1,240 @@
import {Env, p, getTestFiles, getTestEvents} from './mocks/env'
let env: Env
describe('Testing FilesHelper.ts...', () => {
describe('...with push event...', () => {
beforeAll(() => {
env = new Env({}, {githubToken: 'TestToken'}, 'push')
})
afterEach(() => {
process.env = {...env.envStart}
jest.resetModules()
env = new Env({}, {}, 'push')
})
/**
* @function sortChangedFiles
*/
describe('...with function sortChangedFiles...', () => {
it.each([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])(
'...correctly sorts GithubFile array into ChangedFiles object %i/10 times',
() => {
const {files, stats} = getTestFiles()
const changedFiles = require('../FilesHelper').sortChangedFiles(files)
const coreDebug = require('@actions/core').debug
expect(coreDebug).toHaveBeenCalledWith(
expect.stringContaining(JSON.stringify(files, null, 2))
)
const retStats = {
files: 0,
added: 0,
removed: 0,
modified: 0,
renamed: 0
} as {
[key: string]: number
}
Object.keys(changedFiles).forEach(key => {
retStats[key] = changedFiles[key].length
})
expect(retStats).toStrictEqual(stats)
}
)
it.each([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])(
'...correctly sorts GithubFile array into ChangedFiles object without filenames %i/10 times',
() => {
const {files, stats} = getTestFiles()
const changedFiles = require('../FilesHelper').sortChangedFiles(files)
const coreDebug = require('@actions/core').debug
expect(coreDebug).toHaveBeenCalledWith(
expect.stringContaining(JSON.stringify(files, null, 2))
)
const retStats = {
files: 0,
added: 0,
removed: 0,
modified: 0,
renamed: 0
} as {
[key: string]: number
}
Object.keys(changedFiles).forEach(key => {
retStats[key] = changedFiles[key].length
})
expect(retStats).toStrictEqual(stats)
}
)
it('...throws an error', () => {
expect(() =>
require('../FilesHelper').sortChangedFiles({
filename: '/test/file.txt',
status: 'noexist'
})
).toThrowError(
JSON.stringify(
{
error: '500/TypeError',
from: 'sortChangedFiles',
message: 'There was an issue sorting files changed.',
payload: JSON.stringify({})
},
null,
2
)
)
})
})
/**
* @function getFormatExt
*/
describe('...with function getFormatExt...', () => {
it.each(getTestEvents(p.getFormatExtInputs, 'push'))(
'...sets %s ext for input "%s" should be "%s"',
(inputName, input, expected) => {
const ext = require('../FilesHelper').getFormatExt(input)
expect(ext).toBe(expected)
}
)
})
/**
* @function formatChangedFiles
*/
describe('...with function formatChangedFiles...', () => {
it.each(
getTestEvents(
p.changedFilesInput('push', ['/test/file', '/test/file2']),
'push'
)
)('... with %o', (format, input, expected) => {
const ext = require('../FilesHelper').formatChangedFiles(format, input)
expect(ext).toBe(expected)
if (format === 'json') expect(ext).toBe(`["${input[0]}","${input[1]}"]`)
else expect(ext).toBe(`${input[0]}${format}${input[1]}`)
})
it.each(getTestEvents(p.changedFilesInput('push'), 'push'))(
'...formats a big list %s',
(inputName, input, expected) => {
const ext = require('../FilesHelper').formatChangedFiles(
inputName,
input
)
expect(ext).toBe(expected)
}
)
})
/**
* @function writeFiles
*/
describe('...with function writeFiles...', () => {
it.each(getTestEvents(p.changedFilesInput('push'), 'push'))(
'...writesFiles %s',
(inputName, input, expected) => {
const coreDebug = require('@actions/core').debug
const fsWriteFilesSync = require('fs').writeFileSync
const format = require('../FilesHelper').getFormatExt(inputName)
require('../FilesHelper').writeFiles(inputName, 'testKey', input)
expect(coreDebug).toHaveBeenCalledWith(
expect.stringContaining(JSON.stringify(input, null, 2))
)
expect(fsWriteFilesSync).toHaveBeenCalledWith(
`${process.env.HOME}/files_testKey${format}`,
expected,
'utf-8'
)
}
)
it.each(getTestEvents(p.changedFilesInput('push'), 'push'))(
'...writesFiles %s with files key',
(inputName, input, expected) => {
const coreDebug = require('@actions/core').debug
const fsWriteFilesSync = require('fs').writeFileSync
const format = require('../FilesHelper').getFormatExt(inputName)
require('../FilesHelper').writeFiles(inputName, 'files', input)
expect(coreDebug).toHaveBeenCalledWith(
expect.stringContaining(JSON.stringify(input, null, 2))
)
expect(fsWriteFilesSync).toHaveBeenCalledWith(
`${process.env.HOME}/files${format}`,
expected,
'utf-8'
)
}
)
it('...throws error', () => {
const coreDebug = require('@actions/core').debug
expect(() =>
require('../FilesHelper').writeFiles('error', 'testKey', 'json')
).toThrowError(
new Error(
JSON.stringify(
{
error: '500/TypeError',
from: 'writeFiles',
message: 'There was an issue writing output files.',
payload: JSON.stringify({})
},
null,
2
)
)
)
expect(coreDebug).toHaveBeenCalledWith(
expect.stringContaining(
`Writing output file ${process.env.HOME}/files_testKey.txt with error and files "json"`
)
)
})
})
/**
* @function writeOutput
*/
describe('...with function writeOutput...', () => {
it.each(getTestEvents(p.changedFilesInput('push'), 'push'))(
'...writeOutput %o',
(inputName, input, expected) => {
const coreDebug = require('@actions/core').debug
const coreSetOutput = require('@actions/core').setOutput
require('../FilesHelper').writeOutput(inputName, 'testKey', input)
expect(coreDebug).toHaveBeenCalledWith(
expect.stringContaining(JSON.stringify(input, null, 2))
)
expect(coreSetOutput).toHaveBeenCalledWith(`files_testKey`, expected)
}
)
it.each(getTestEvents(p.changedFilesInput('push'), 'push'))(
'...writeOutput %o with files key',
(inputName, input, expected) => {
const coreDebug = require('@actions/core').debug
const coreSetOutput = require('@actions/core').setOutput
require('../FilesHelper').writeOutput(inputName, 'files', input)
expect(coreDebug).toHaveBeenCalledWith(
expect.stringContaining(JSON.stringify(input, null, 2))
)
expect(coreSetOutput).toHaveBeenCalledWith(`files`, expected)
}
)
it('...throws error', () => {
const coreDebug = require('@actions/core').debug
expect(() =>
require('../FilesHelper').writeOutput('error', 'testKey', 'json')
).toThrowError(
new Error(
JSON.stringify(
{
error: '500/TypeError',
from: 'writeOutput',
message: 'There was an issue setting action outputs.',
payload: JSON.stringify({})
},
null,
2
)
)
)
expect(coreDebug).toHaveBeenCalledWith(
'Writing output files_testKey with error and files "json"'
)
})
})
})
})
View File
@ -0,0 +1,241 @@
import {Env, p, getTestEvents} from './mocks/env'
let env: Env
describe('Testing GithubHelper.ts...', () => {
describe.each(p.testEvents)('...with %s event...', event => {
beforeAll(() => {
env = new Env({}, {githubToken: 'TestToken'}, event)
})
afterEach(() => {
process.env = {...env.envStart}
jest.resetModules()
env = new Env({}, {}, event)
})
/**
* @function initClient
*/
describe('...with function initClientTests...', () => {
it.each(getTestEvents(p.initClientTestInputs, event))(
'...%s',
async (title, input, expected) => {
process.env = {...env.envStart}
env = new Env({}, {}, event)
let gh
if (title.includes('without a token'))
expect(() =>
require('../GithubHelper').initClient(input)
).toThrowError(
new Error(
JSON.stringify(
{
error: '500/Error',
from: 'initClient',
message:
'There was an error creating github client. Please check your token.',
payload: {}
},
null,
2
)
)
)
else {
gh = require('../GithubHelper').initClient(input)
const {GitHub} = require('@actions/github')
expect(GitHub).toHaveBeenCalledTimes(1)
expect(GitHub).toHaveBeenCalledWith(expected)
expect(gh).toEqual(env.octokitMock)
}
}
)
})
/**
* @function getChangedPRFiles
*/
describe('...with function getChangedPRFiles...', () => {
it.each(getTestEvents(p.getChangedPRFilesTestInputs, event))(
'...%s',
async (title, input, expected) => {
if (title.includes('throws an error')) {
expect.assertions(1)
await expect(
require('../GithubHelper').getChangedPRFiles(
env.octokitMock,
input.repo,
input.owner,
input.pullNumber
)
).rejects.toThrowError(new Error(JSON.stringify(expected, null, 2)))
} else {
let files: any[] = []
files = await require('../GithubHelper').getChangedPRFiles(
env.octokitMock,
input.repo,
input.owner,
input.pullNumber
)
expect(files).toStrictEqual(expected)
expect(files.length).toBe(7)
}
}
)
it('...throws errors', async () => {
await expect(
require('../GithubHelper').getChangedPRFiles(
env.octokitMock,
'trilom/file-changes-action',
'error',
'error'
)
).rejects.toThrowError(
new Error(
JSON.stringify(
{
error: '500/Unknown Error:Error',
from: 'getChangedPRFiles',
message:
'There was an error getting change files for repo:trilom/file-changes-action owner:error pr:error',
payload: JSON.stringify({name: 'HttpError', status: '500'})
},
null,
2
)
)
)
await expect(
require('../GithubHelper').getChangedPRFiles(
env.octokitMock,
'trilom/file-changes-action',
'unknown',
'unknown'
)
).rejects.toThrowError(
new Error(
JSON.stringify(
{
error: '500/Unknown Error:',
from: 'getChangedPRFiles',
message:
'There was an error getting change files for repo:trilom/file-changes-action owner:unknown pr:unknown',
payload: ''
},
null,
2
)
)
)
})
})
/**
* @function getChangedPushFiles
*/
describe('...with function getChangedPushFiles...', () => {
it.each(getTestEvents(p.getChangedPushFilesTestInputs, event))(
'...%s',
async (title, input, expected) => {
if (title.includes('throws an error')) {
expect.assertions(1)
await expect(
require('../GithubHelper').getChangedPushFiles(
env.octokitMock,
input.repo,
input.owner,
input.before,
input.after
)
).rejects.toThrowError(new Error(JSON.stringify(expected, null, 2)))
} else {
let files: any[] = []
files = await require('../GithubHelper').getChangedPushFiles(
env.octokitMock,
input.repo,
input.owner,
input.before,
input.after
)
expect(files).toStrictEqual(expected)
expect(files.length).toBe(7)
}
}
)
it('...throws errors', async () => {
await expect(
require('../GithubHelper').getChangedPushFiles(
env.octokitMock,
'trilom/file-changes-action',
'error',
'error',
'error'
)
).rejects.toThrowError(
new Error(
JSON.stringify(
{
error: '500/Unknown Error:Error',
from: 'getChangedPushFiles',
message:
'There was an error getting change files for repo:trilom/file-changes-action owner:error base:error head:error',
payload: JSON.stringify({name: 'HttpError', status: '500'})
},
null,
2
)
)
)
await expect(
require('../GithubHelper').getChangedPushFiles(
env.octokitMock,
'trilom/file-changes-action',
'unknown',
'unknown',
'unknown'
)
).rejects.toThrowError(
new Error(
JSON.stringify(
{
error: '500/Unknown Error:',
from: 'getChangedPushFiles',
message:
'There was an error getting change files for repo:trilom/file-changes-action owner:unknown base:unknown head:unknown',
payload: ''
},
null,
2
)
)
)
})
})
/**
* @function getChangedFiles
*/
describe('...with function getChangedFiles...', () => {
it.each(getTestEvents(p.getChangedFilesTestInputs, event))(
'...%s',
async (title, input, expected) => {
if (title.includes('throws an error')) {
expect.assertions(1)
await expect(
require('../GithubHelper').getChangedFiles(
env.octokitMock,
input.repo,
{...input}
)
).rejects.toThrowError(new Error(JSON.stringify(expected, null, 2)))
} else {
let files: any[] = []
files = await require('../GithubHelper').getChangedFiles(
env.octokitMock,
input.repo,
{...input}
)
expect(files).toStrictEqual(expected)
expect(files.length).toBe(7)
}
}
)
})
})
})
View File
@ -0,0 +1,222 @@
import {Env, eventName, getTestEvents, p} from './mocks/env'
let env: Env
describe('Testing InputHelper.ts...', () => {
describe.each(p.testEvents)('...with %s event...', event => {
beforeAll(() => {
env = new Env({}, {githubToken: 'TestToken'}, event)
})
afterEach(() => {
process.env = {...env.envStart}
jest.resetModules()
env = new Env({}, {}, event)
})
/**
* @function getInputs
*/
describe('...with function getInputs...', () => {
it('...sets correct default input parameters.', () => {
const {payload, issue, eventName: contextEventName} = env.context
const {
prNumber,
pushAfter,
pushBefore,
githubToken,
githubRepo,
output,
fileOutput,
event: inputEventName
} = require('../InputHelper').getInputs()
const {getInput} = require('@actions/core')
if (event.includes('push')) {
expect(prNumber).toBe(NaN)
expect(pushAfter).toBe(payload.after)
expect(pushBefore).toBe(payload.before)
}
if (event.includes('pull_request') || event.includes('issue_comment')) {
expect(prNumber).toBe(issue.number)
if (event === 'pull_request_synchronize') {
expect(pushAfter).toBe(payload.after)
expect(pushBefore).toBe(payload.before)
} else {
expect(pushAfter).toBeFalsy()
expect(pushBefore).toBeFalsy()
}
}
expect(githubToken).toBe(process.env.INPUT_GITHUBTOKEN)
expect(githubRepo).toBe(process.env.GITHUB_REPOSITORY)
expect(output).toBe(' ')
expect(fileOutput).toBe(' ')
expect(inputEventName).toBe(contextEventName)
expect(getInput).toHaveBeenCalled()
})
it('...throws error with no token (undefined) process.env["GITHUB_TOKEN"] or (undefined) input githubToken', () => {
delete process.env.GITHUB_TOKEN
delete process.env.INPUT_GITHUBTOKEN
const {getInput} = require('@actions/core')
expect(() => {
require('../InputHelper').getInputs()
}).toThrowError()
expect(getInput).toHaveBeenCalledTimes(1)
})
it('...throws error with empty string ("") process.env["GITHUB_TOKEN"] or empty string ("") input githubToken', () => {
env.updateInput({githubToken: ''})
process.env.GITHUB_TOKEN = ''
const {getInput} = require('@actions/core')
expect(() => {
require('../InputHelper').getInputs()
}).toThrowError()
expect(getInput).toHaveBeenCalledTimes(1)
})
it.each(getTestEvents(p.inputTestInputs, event))(
'...sets %s input "%s" should be %p',
(inputName, input, expected) => {
env.updateInput({[inputName]: input})
const {payload, issue, eventName: contextEventName} = env.context
const {
prNumber,
pushAfter,
pushBefore,
githubToken,
githubRepo,
output,
fileOutput,
event: inputEventName
} = require('../InputHelper').getInputs()
const {getInput} = require('@actions/core')
if (event.includes('push')) {
expect(prNumber).toBe(inputName === 'prNumber' ? expected : NaN)
expect(pushAfter).toBe(
inputName === 'pushAfter' ? expected : payload.after
)
expect(pushBefore).toBe(
inputName === 'pushBefore' ? expected : payload.before
)
}
if (
event.includes('pull_request') ||
event.includes('issue_comment')
) {
expect(prNumber).toBe(
inputName === 'prNumber' ? expected : issue.number
)
if (event === 'pull_request_synchronize') {
expect(pushAfter).toBe(
inputName === 'pushAfter' ? expected : payload.after
)
expect(pushBefore).toBe(
inputName === 'pushBefore' ? expected : payload.before
)
} else {
expect(pushAfter).toBe(
inputName === 'pushAfter' ? expected : false
)
expect(pushBefore).toBe(
inputName === 'pushBefore' ? expected : false
)
}
}
expect(githubToken).toBe(
inputName === 'githubToken' ? expected : 'EnvDefaultToken'
)
expect(githubRepo).toBe(
inputName === 'githubRepo'
? expected
: process.env.GITHUB_REPOSITORY
)
expect(output).toBe(inputName === 'output' ? expected : ' ')
expect(fileOutput).toBe(inputName === 'fileOutput' ? expected : ' ')
expect(inputEventName).toBe(contextEventName)
expect(getInput).toBeCalled()
}
)
})
/**
* @function inferInput
*/
describe('...with function inferInput...', () => {
it.each(getTestEvents(p.inferTestInputs, event))(
'...%s',
(title, input, expected) => {
const {error} = require('@actions/core')
const {warning} = require('@actions/core')
if (title.includes('ERROR with no')) {
expect(() => {
require('../InputHelper').inferInput(
input.before,
input.after,
input.pr
)
}).toThrowError(
new Error(
JSON.stringify(
{
error: '500/inferInput Error',
from: 'inferInput',
message: `Received event from ${eventName(
event
)}, but received no inputs. {event_name:${eventName(
event
)}, pr: NaN, before:, after:}`,
payload: ''
},
null,
2
)
)
)
} else if (title.includes('ERROR with single')) {
expect(() => {
require('../InputHelper').inferInput(
input.before,
input.after,
input.pr
)
}).toThrowError(
new Error(
JSON.stringify(
{
error: '500/inferInput Error',
from: 'inferInput',
message: `Received event from ${eventName(
event
)}, but only received a before(${input.before}) or after(${
input.after
}).\n I need both of these if you want to use a Push event.`,
payload: ''
},
null,
2
)
)
)
} else {
const data = require('../InputHelper').inferInput(
input.before,
input.after,
input.pr
)
Object.keys(data).forEach(key =>
expect(data[key]).toBe(expected[key])
)
expect(error).not.toHaveBeenCalled()
}
if (title.includes('WARN weird'))
expect(warning).toHaveBeenCalledWith(
expect.stringContaining(
`received a before(${input.before}) or after(${input.after}) value.`
)
)
if (title.includes('WARN all'))
expect(warning).toHaveBeenCalledWith(
expect.stringContaining(
`but received a before(${input.before}), after(${input.after}), and PR(${input.pr}).`
)
)
else expect(error).not.toHaveBeenCalled()
}
)
})
})
})
View File
@ -0,0 +1,67 @@
import {Env, p, getTestEvents} from './mocks/env'
let env: Env
describe('Testing UtilsHelper.ts...', () => {
describe('...with push event...', () => {
beforeAll(() => {
env = new Env({}, {githubToken: 'TestToken'}, 'push')
})
afterEach(() => {
process.env = {...env.envStart}
jest.resetModules()
env = new Env({}, {}, 'push')
})
/**
* @function getErrorString
*/
describe('...with function getErrorString...', () => {
it('...can throw an error', () => {
const error = require('../UtilsHelper').getErrorString()
expect(JSON.stringify(JSON.parse(error))).toBe(
JSON.stringify({error: '500/undefined', payload: ''})
)
})
it('...can throw an error for my error', () => {
const {setFailed, error: coreError} = require('@actions/core')
const obj = {a: {}}
obj.a = {b: obj}
expect(() =>
require('../UtilsHelper').getErrorString(
'test',
200,
'test',
'test',
obj
)
).toThrowError(
JSON.stringify({
name: '500/undefined',
message: 'Error throwing error.'
})
)
// expect(JSON.stringify(JSON.parse(error))).toBe(JSON.stringify({error:'500/undefined', payload:''}))
expect(setFailed).toBeCalledWith(
expect.stringContaining('Error throwing error.')
)
expect(coreError).toBeCalledWith(
expect.stringContaining('Error throwing error.')
)
})
})
/**
* @function errorMessage
*/
describe('...with function errorMessage...', () => {
it.each(getTestEvents(p.errorMessageInputs, 'push'))(
'...for function %s',
(f, e, expected) => {
const error = require('../UtilsHelper').errorMessage(f, e)
expect(error).toBe(
`${expected}\nException: ${JSON.stringify(e, null, 2)}`
)
}
)
})
})
})
View File
@ -0,0 +1,129 @@
import {existsSync, mkdirSync, readFileSync, rmdirSync, unlinkSync} from 'fs'
import {resolve as _resolve} from 'path'
import {
eventName as formatEventName,
formatInput,
getTestEvents,
p
} from './mocks/env'
// debugger
const pEnv: {[key: string]: string | undefined} = {...process.env}
let processStdoutMock: jest.SpyInstance
let consoleLogMock: jest.SpyInstance
let output = ''
describe.each(p.testEvents)('Testing main.ts with %s event...', event => {
/**
* @function run
*/
describe.each(getTestEvents(p.mainInputs, event))(
'...function run with %s event inputs non mocked...',
(eventName, eventInput, eventExpected) => {
describe.each(getTestEvents(p.getFormatExtInputs, 'push'))(
'...with output %s...',
(outputName, outputInput, outputExpected) => {
describe.each(getTestEvents(p.getFormatExtInputs, 'push'))(
'...with fileOutput %s...',
(fileOutputName, fileOutputInput, fileOutputExpected) => {
beforeEach(() => {
consoleLogMock = jest
.spyOn(console, 'log')
.mockImplementation((message: string) => {
output += ` ${message}`
})
processStdoutMock = jest
.spyOn(process.stdout, 'write')
.mockImplementation(
(
command: string | Uint8Array,
encoding?: string,
cb?: () => void
) => {
output += ` ${command}`
return false
}
)
mkdirSync(
_resolve(
__dirname,
`outputs/${event}/${eventName}/o_${outputName}f_${fileOutputName}`
),
{recursive: true}
)
process.env = {
HOME: _resolve(
__dirname,
`outputs/${event}/${eventName}/o_${outputName}f_${fileOutputName}`
),
GITHUB_EVENT_NAME: formatEventName(event),
GITHUB_EVENT_PATH: _resolve(
__dirname,
`mocks/env/events/${event}.json`
),
...formatInput({
githubRepo: 'trilom/file-changes-action',
githubToken: process.env.GITHUB_TOKEN || '',
output: outputInput,
fileOutput: fileOutputInput,
...eventInput
})
}
})
afterEach(() => {
process.env = {...pEnv}
output = ''
jest.restoreAllMocks()
})
it('...no-mock', async () => {
await expect(require('../main').run()).resolves.toBe(undefined)
const counts = {
files: 73,
files_added: 52,
files_modified: 13,
files_removed: 8
} as {[key: string]: number}
Object.keys(counts).forEach(async key => {
expect(output).toContain(`::set-output name=${key}`)
expect(
existsSync(
_resolve(
__dirname,
`outputs/${event}/${eventName}/o_${outputName}f_${fileOutputName}/${key}${fileOutputExpected}`
)
)
).toBeTruthy()
if (fileOutputExpected === '.json') {
expect(
JSON.parse(
readFileSync(
_resolve(
__dirname,
`outputs/${event}/${eventName}/o_${outputName}f_${fileOutputName}/${key}${fileOutputExpected}`
),
'utf8'
)
)
).toHaveLength(counts[key])
} else {
expect(
readFileSync(
_resolve(
__dirname,
`outputs/${event}/${eventName}/o_${outputName}f_${fileOutputName}/${key}${fileOutputExpected}`
),
'utf8'
).split(fileOutputInput)
).toHaveLength(counts[key])
}
})
}, 10000)
}
)
}
)
}
)
})
View File
@ -0,0 +1,89 @@
import {Env, getTestEvents, getTestFiles, p} from './mocks/env'
let env: Env
describe('Testing main.ts...', () => {
describe.each(p.testEvents)('...with %s event...', event => {
/**
* @function run
*/
describe('...with function run...', () => {
describe.each(getTestEvents(p.getFormatExtInputs, 'push'))(
'...with fileOutput %s...',
(fileOutputName, fileOutputInput, fileOutputExpected) => {
describe.each(getTestEvents(p.getFormatExtInputs, 'push'))(
'...with output %o...',
(outputName, outputInput, outputExpected) => {
describe.each(getTestEvents(p.mainInputs, event))(
'...with %s event inputs mocked...',
(eventName, eventInput, eventExpected) => {
beforeEach(() => {
env = new Env(
{},
{
githubRepo: 'trilom/file-changes-action',
githubToken: 'TestToken',
output: outputInput,
fileOutput: fileOutputInput,
...eventInput,
mock: 'true'
},
event
)
})
afterEach(() => {
process.env = env.envStart
jest.resetModules()
jest.unmock('@actions/core')
jest.unmock('@actions/github')
jest.unmock('../InputHelper')
jest.unmock('../FilesHelper')
jest.unmock('../GithubHelper')
})
it('...mocked', async () => {
const githubHelper = require('../GithubHelper')
const filesHelper = require('../FilesHelper')
githubHelper.getChangedFiles = jest.fn(
() => getTestFiles().files
)
filesHelper.writeOutput = jest.fn(() => {})
filesHelper.writeFiles = jest.fn(() => {})
await expect(require('../main').run()).resolves.toBe(
undefined
)
expect(githubHelper.getChangedFiles).toBeCalled()
expect(filesHelper.writeOutput).toBeCalled()
expect(filesHelper.writeFiles).toBeCalled()
})
it.each(getTestEvents(p.mainErrorInputs, 'push'))(
'...throws error for mocked function %s...',
async (f, e, expected) => {
const inputHelper = require('../InputHelper')
let thrown = false
inputHelper.getInputs = jest.fn(() => {
thrown = true
throw new Error(e)
})
await expect(
require('../main').run()
).rejects.toThrowError(
new Error(
JSON.stringify({
name: 'Error',
message: 'Error',
from: f
})
)
)
expect(inputHelper.getInputs).toHaveBeenCalledTimes(1)
}
)
}
)
}
)
}
)
})
})
})
View File
@ -0,0 +1,55 @@
import {mock} from '.'
const core = mock()
describe('Testing CoreMock object...', () => {
beforeAll(() => jest.restoreAllMocks())
it('...CoreMock is a mock', () => {
expect(jest.isMockFunction(core.getInput)).toBe(true)
expect(jest.isMockFunction(core.setFailed)).toBe(true)
expect(jest.isMockFunction(core.setOutput)).toBe(true)
expect(jest.isMockFunction(core.debug)).toBe(true)
expect(jest.isMockFunction(core.warning)).toBe(true)
expect(jest.isMockFunction(core.info)).toBe(true)
expect(jest.isMockFunction(core.error)).toBe(true)
})
it('...CoreMock mocks core', () => {
const realCore = require('@actions/core')
expect(core).toMatchObject(realCore)
})
it('...CoreMock mocks setFailed', () => {
core.setFailed('Test Message')
expect(core.error).toBeCalledWith('Test Message')
expect(core.setFailed).toBeCalledWith('Test Message')
})
it('...CoreMock mocks setOutput', () => {
core.setOutput('TestName', 'TestValue')
expect(core.setOutput).toBeCalledWith('TestName', 'TestValue')
})
it('...CoreMock mocks setOutput error', () => {
expect(() => core.setOutput('ERROROUTPUT', 'TestValue')).toThrowError(
new Error(JSON.stringify({name: 'CoreError', status: '500'}))
)
})
it('...CoreMock mocks getInput', () => {
process.env.INPUT_TEST = 'TESTINPUT'
const input = core.getInput('TEST')
expect(input).toBe('TESTINPUT')
})
it('...CoreMock mocks debug', () => {
core.debug('Test Message')
expect(core.debug).toBeCalledWith('Test Message')
})
it('...CoreMock mocks warning', () => {
core.warning('Test Message')
expect(core.warning).toBeCalledWith('Test Message')
})
it('...CoreMock mocks info', () => {
core.info('Test Message')
expect(core.info).toBeCalledWith('Test Message')
})
it('...CoreMock mocks error', () => {
core.error('Test Message')
expect(core.error).toBeCalledWith('Test Message')
})
})
View File
@ -0,0 +1,34 @@
import {CoreMock} from 'typings/CoreMock'
const coreMock: CoreMock = {
setFailed: jest.fn(message => {
coreMock.error(message)
// console.error(`setFailed triggered`)
}),
setOutput: jest.fn((name, value) => {
if (name === 'ERROROUTPUT')
throw new Error(JSON.stringify({name: 'CoreError', status: '500'}))
// console.log(`setOutputName: ${name} value: ${value}`)
}),
// eslint-disable-next-line @typescript-eslint/no-unused-vars
getInput: jest.fn((name, options) => {
return process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`]
}),
debug: jest.fn(message => {
// console.debug(`core.debug triggered: ${message}`)
}),
warning: jest.fn(message => {
// console.warn(`core.warning triggered: ${message}`)
}),
info: jest.fn(message => {
// console.info(`core.info triggered: ${message}`)
}),
error: jest.fn(message => {
// console.error(`core.error triggered: ${message}`)
})
}
export function mock(): CoreMock {
jest.mock('@actions/core', () => coreMock)
return coreMock
}
View File
@ -0,0 +1,23 @@
import {mock} from '.'
const fs = mock()
describe('Testing FsMock object...', () => {
beforeAll(() => jest.restoreAllMocks())
it('...FsMock is a mock', () => {
expect(jest.isMockFunction(fs.writeFileSync)).toBe(true)
})
it('...FsMock mocks fs', () => {
const realFs = require('fs')
expect(fs).toMatchObject(realFs)
})
it('...FsMock mocks writeFileSync', () => {
fs.writeFileSync('a', 'b', 'c')
expect(fs.writeFileSync).toBeCalledWith('a', 'b', 'c')
})
it('...FsMock mocks an error', async () => {
expect(() => fs.writeFileSync('error', 'b', 'c')).toThrowError(
new Error(JSON.stringify({name: 'PathError', status: '500'}))
)
})
})
View File
@ -0,0 +1,14 @@
import {FsMock} from 'typings/FsMock'
const fsMock = {
writeFileSync: jest.fn((path, data, options) => {
if (path === 'error')
throw new Error(JSON.stringify({name: 'PathError', status: '500'}))
// console.log(`fs.writeFileSync triggered with path: ${path} data: ${data} options: ${options}`)
})
}
export function mock(): FsMock {
jest.mock('fs', () => fsMock)
return fsMock
}
View File
@ -0,0 +1,29 @@
import {Context} from '@actions/github/lib/context'
import {mock} from '.'
import {octokitMock} from '../octokit'
const github = mock()
describe('Testing GitHubMock object ...', () => {
beforeAll(() => jest.restoreAllMocks())
it('...GitHubMock is a mock', () => {
expect(jest.isMockFunction(github.github.GitHub)).toBe(true)
expect(github.context).toMatchObject(new Context())
})
it('...GitHubMock mocks GitHub', () => {
const {GitHub} = require('@actions/github')
const mockGitHub = GitHub('test')
expect(mockGitHub).toMatchObject(octokitMock)
})
it('...GitHubMock mocks unauthorized GitHub', () => {
const GitHub = mock()
expect(jest.isMockFunction(GitHub.github.GitHub)).toBe(true)
})
it('...GitHubMock mocks authorizing GitHub', () => {
const GitHub = mock()
const octokit = GitHub.github.GitHub('token')
expect(jest.isMockFunction(GitHub.github.GitHub)).toBe(true)
expect(GitHub.github.GitHub).toBeCalledWith('token')
expect(octokit).toMatchObject(octokitMock)
})
})
View File
@ -0,0 +1,29 @@
import {Context} from '@actions/github/lib/context'
import {GitHubMock} from 'typings/GitHubMock'
import {OctokitMock} from 'typings/OctokitMock'
import {octokitMock} from '../octokit'
function getGitHubMock(context: Context): GitHubMock {
return {
GitHub: jest.fn(token => {
// console.log(`I am authorizing GitHub with token: ${token}`)
if (!token)
throw new Error(
JSON.stringify({name: 'GithubInitError', status: '500'})
)
return octokitMock
}),
context
}
}
export function mock(): {
github: GitHubMock
octokit: OctokitMock
context: Context
} {
const context = new Context()
const github = getGitHubMock(context)
jest.mock('@actions/github', () => github)
return {github, octokit: octokitMock, context}
}
View File
@ -0,0 +1,48 @@
import {EndpointOptions} from '@octokit/types'
import {fn as merge} from './merge'
import {
OctokitPullsListFilesEndpointMergeRequest,
OctokitPullsListFilesEndpointMergeResponse,
OctokitReposCompareCommitsEndpointMergeRequest,
OctokitReposCompareCommitsEndpointMergeResponse
} from '../payloads'
describe('Testing Octokit object...', () => {
beforeAll(() => {
jest.restoreAllMocks()
})
it('...endpoint.merge returns 500 error with pull_number "error"', () => {
expect(() => {
merge(({pull_number: 'error'} as unknown) as EndpointOptions)
}).toThrowError(
new Error(JSON.stringify({name: 'HttpError', status: '500'}))
)
})
it('...endpoint.merge returns 500 error with base "error"', () => {
expect(() => {
merge(({base: 'error'} as unknown) as EndpointOptions)
}).toThrowError(
new Error(JSON.stringify({name: 'HttpError', status: '500'}))
)
})
it('...endpoint.merge returns empty object', async () => {
const request = OctokitPullsListFilesEndpointMergeRequest
const data = merge({...request, pull_number: NaN})
expect(data).toStrictEqual({
...OctokitPullsListFilesEndpointMergeResponse,
...{pull_number: NaN, base: '', head: ''}
})
})
it('...endpoint.merge for pull request', () => {
const request = OctokitPullsListFilesEndpointMergeRequest
const response = OctokitPullsListFilesEndpointMergeResponse
const data = merge(request)
expect(data).toStrictEqual(response)
})
it('...endpoint.merge for push', () => {
const request = OctokitReposCompareCommitsEndpointMergeRequest
const response = OctokitReposCompareCommitsEndpointMergeResponse
const data = merge(request)
expect(data).toStrictEqual(response)
})
})
View File
@ -0,0 +1,39 @@
// Import Request and Response Objects
import {EndpointOptions, RequestOptions} from '@octokit/types'
import {
// OctokitReposCompareCommitsEndpointMergeRequest,
OctokitReposCompareCommitsEndpointMergeResponse,
// OctokitPullsListFilesEndpointMergeRequest,
OctokitPullsListFilesEndpointMergeResponse
} from '../payloads'
// Form and export Response Objects
export {OctokitReposCompareCommitsEndpointMergeResponse as pushResponse}
export {OctokitPullsListFilesEndpointMergeResponse as prResponse}
// Export mock function
export const fn = jest.fn((data: EndpointOptions, response?: number) => {
if (data.base === 'error' || data.pull_number === 'error') {
throw new Error(JSON.stringify({name: 'HttpError', status: '500'}))
}
if (data.base === 'unknown' || data.pull_number === 'unknown') {
throw JSON.stringify({idk: 'error', message: 'test'})
}
if (
(!data.base && !data.head && Number.isNaN(data.pull_number)) ||
(!data.base && data.head) ||
(data.base && !data.head)
)
return {
...OctokitPullsListFilesEndpointMergeResponse,
...{pull_number: NaN, base: '', head: ''}
} as RequestOptions
if (data.pull_number) {
return {
...OctokitPullsListFilesEndpointMergeResponse,
...data
} as RequestOptions
}
return {
...OctokitReposCompareCommitsEndpointMergeResponse,
...data
} as RequestOptions
})
View File
@ -0,0 +1,11 @@
import {octokitMock} from '.'
describe('Testing Octokit object ...', () => {
beforeAll(() => jest.restoreAllMocks())
it('...Octokit is a mock', () => {
expect(octokitMock).toHaveProperty('paginate')
expect(octokitMock).toHaveProperty('pulls')
expect(octokitMock).toHaveProperty('repos')
expect(octokitMock).not.toHaveProperty('actions')
})
})
Some files were not shown because too many files have changed in this diff