diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 00000000..79621be8
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,9 @@
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+indent_style = space
+indent_size = 2
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000..94f480de
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+* text=auto eol=lf
diff --git a/.github/workflows/pull-request-verification.yml b/.github/workflows/pull-request-verification.yml
index 567a4d00..71fd0913 100644
--- a/.github/workflows/pull-request-verification.yml
+++ b/.github/workflows/pull-request-verification.yml
@@ -3,6 +3,7 @@ on:
pull_request:
branches:
- master
+ - develop
jobs:
build:
@@ -53,3 +54,63 @@ jobs:
- name: filter-test
if: steps.filter.outputs.any != 'true' || steps.filter.outputs.error == 'true'
run: exit 1
+
+ test-wd-without-token:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ path: somewhere
+ - uses: ./somewhere
+ id: filter
+ with:
+ token: ''
+ working-directory: somewhere
+ filters: '.github/filters.yml'
+ - name: filter-test
+ if: steps.filter.outputs.any != 'true' || steps.filter.outputs.error == 'true'
+ run: exit 1
+
+ test-change-type:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: configure GIT user
+ run: git config user.email "john@nowhere.local" && git config user.name "John Doe"
+ - name: modify working tree
+ run: touch add.txt && rm README.md && echo "TEST" > LICENSE
+ - name: commit changes
+ run: git add -A && git commit -a -m 'testing this action'
+ - uses: ./
+ id: filter
+ with:
+ token: ''
+ list-files: shell
+ filters: |
+ added:
+ - added: "add.txt"
+ deleted:
+ - deleted: "README.md"
+ modified:
+ - modified: "LICENSE"
+ any:
+ - added|deleted|modified: "*"
+ - name: Print 'added_files'
+ run: echo ${{steps.filter.outputs.added_files}}
+ - name: Print 'modified_files'
+ run: echo ${{steps.filter.outputs.modified_files}}
+ - name: Print 'deleted_files'
+ run: echo ${{steps.filter.outputs.deleted_files}}
+ - name: filter-test
+ # only single quotes are supported in GH action literal
+ # single quote needs to be escaped with single quote
+ # '''add.txt''' resolves to string 'add.txt'
+ if: |
+ steps.filter.outputs.added != 'true'
+ || steps.filter.outputs.deleted != 'true'
+ || steps.filter.outputs.modified != 'true'
+ || steps.filter.outputs.any != 'true'
+ || steps.filter.outputs.added_files != '''add.txt'''
+ || steps.filter.outputs.modified_files != '''LICENSE'''
+ || steps.filter.outputs.deleted_files != '''README.md'''
+ run: exit 1
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4f591ac2..c0b67013 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
# Changelog
+## v2.3.0
+- [Improved documentation](https://github.com/dorny/paths-filter/pull/37)
+- [Change detection using git "three dot" diff](https://github.com/dorny/paths-filter/pull/35)
+- [Export files matching filter](https://github.com/dorny/paths-filter/pull/32)
+- [Extend filter syntax with optional specification of file status: added, modified, deleted](https://github.com/dorny/paths-filter/pull/22)
+- [Add working-directory input](https://github.com/dorny/paths-filter/pull/21)
+
## v2.2.1
- [Add support for pull_request_target](https://github.com/dorny/paths-filter/pull/29)
@@ -21,4 +28,4 @@
Updated dependencies - fixes github security alert
## v1.0.0
-First official release uploaded to marketplace.
\ No newline at end of file
+First official release uploaded to marketplace.
diff --git a/README.md b/README.md
index dadbf7ad..e367db60 100644
--- a/README.md
+++ b/README.md
@@ -1,79 +1,151 @@
-
-
-
-# Paths filter
+# paths-filter
-With this [Github Action](https://github.com/features/actions) you can execute your workflow steps only if relevant files are modified.
+This [Github Action](https://github.com/features/actions) enables conditional execution of workflow steps and jobs,
+based on the paths that are modified by pull request or in pushed commits.
It saves time and resources especially in monorepo setups, where you can run slow tasks (e.g. integration tests or deployments) only for changed components.
-Github workflows built-in [path filters](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#onpushpull_requestpaths)
+Github workflows built-in [path filters](https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#onpushpull_requestpaths)
doesn't allow this because they doesn't work on a level of individual jobs or steps.
-Supported workflows:
-- Action triggered by **[pull_request](https://help.github.com/en/actions/reference/events-that-trigger-workflows#pull-request-event-pull_request)** event:
- - changes detected against the pull request base branch
-- Action triggered by **[push](https://help.github.com/en/actions/reference/events-that-trigger-workflows#push-event-push)** event:
- - changes detected against the most recent commit on the same branch before the push
- - changes detected against the top of the configured *base* branch (e.g. master)
-
-## Usage
-
-Filter rules are defined using YAML format.
-Each filter rule is a list of [glob expressions](https://github.com/isaacs/minimatch).
-Corresponding output variable will be created to indicate if there's a changed file matching any of the rule glob expressions.
-Output variables can be later used in the `if` clause to conditionally run specific steps.
-
-### Inputs
-- **`token`**: GitHub Access Token - defaults to `${{ github.token }}` so you don't have to explicitly provide it.
-- **`base`**: Git reference (e.g. branch name) against which the changes will be detected. Defaults to repository default branch (e.g. master).
- If it references same branch it was pushed to, changes are detected against the most recent commit before the push.
- This option is ignored if action is triggered by *pull_request* event.
-- **`filters`**: Path to the configuration file or directly embedded string in YAML format. Filter configuration is a dictionary, where keys specifies rule names and values are lists of file path patterns.
-
-### Outputs
-- For each rule it sets output variable named by the rule to text:
- - `'true'` - if **any** of changed files matches any of rule patterns
- - `'false'` - if **none** of changed files matches any of rule patterns
-
-### Notes
-- minimatch [dot](https://www.npmjs.com/package/minimatch#dot) option is set to true - therefore
- globbing will match also paths where file or folder name starts with a dot.
-- You can use YAML anchors to reuse path expression(s) inside another rule. See example in the tests.
-- If changes are detected against the previous commit and there is none (i.e. first push of a new branch), all filter rules will report changed files.
-- You can use `base: ${{ github.ref }}` to configure change detection against previous commit for every branch you create.
-
-### Example
+
+## Supported workflows:
+- Pull requests:
+ - Action triggered by **[pull_request](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#pull_request)**
+ or **[pull_request_target](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#pull_request_target)** event
+ - Changes are detected against the pull request base branch
+ - Uses Github REST API to fetch list of modified files
+- Feature branches:
+ - Action triggered by **[push](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#push)** event
+ - Changes are detected against the merge-base with configured base branch
+ - Uses git commands to detect changes - repository must be already [checked out](https://github.com/actions/checkout)
+- Master, Release or other long-lived branches:
+ - Action triggered by **[push](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#push)** event
+ - Changes are detected against the most recent commit on the same branch before the push
+ - Uses git commands to detect changes - repository must be already [checked out](https://github.com/actions/checkout)
+
+
+## Important notes:
+- Paths expressions are evaluated using [minimatch](https://github.com/isaacs/minimatch) library.
+ Documentation for path expression format can be found on project github page.
+- Minimatch [dot](https://www.npmjs.com/package/minimatch#dot) option is set to true.
+ Globbing will match also paths where file or folder name starts with a dot.
+- It's recommended to quote your path expressions with `'` or `"`. Otherwise you will get an error if it starts with `*`.
+
+
+# What's New
+
+- Fixed behavior of feature branch workflow:
+ - Detects only changes introduced by feature branch. Later modifications on base branch are ignored.
+- Filter by type of file change:
+ - Optionally consider if file was added, modified or deleted
+- Custom processing of changed files:
+ - Optionally export paths of all files matching the filter
+ - Output can be space-delimited or in JSON format
+- Improved documentation and logging
+
+For more information see [CHANGELOG](https://github.com/dorny/paths-filter/blob/master/CHANGELOG.md)
+
+# Usage
+
+```yaml
+- uses: dorny/paths-filter@v2
+ with:
+ # Defines filters applied to detected changed files.
+ # Each filter has a name and list of rules.
+ # Rule is a glob expression - paths of all changed
+ # files are matched against it.
+ # Rule can optionally specify if the file
+ # should be added, modified or deleted.
+ # For each filter there will be corresponding output variable to
+ # indicate if there's a changed file matching any of the rules.
+ # Optionally there can be a second output variable
+ # set to list of all files matching the filter.
+ # Filters can be provided inline as a string (containing valid YAML document)
+ # or as a relative path to separate file (e.g.: .github/filters.yaml).
+ # Multiline string is evaluated as embedded filter definition,
+ # single line string is evaluated as relative path to separate file.
+ # Filters syntax is documented by example - see examples section.
+ filters: ''
+
+ # Branch against which the changes will be detected.
+ # If it references same branch it was pushed to,
+ # changes are detected against the most recent commit before the push.
+ # Otherwise it uses git merge-base to find best common ancestor between
+ # current branch (HEAD) and base.
+ # When merge-base is found, it's used for change detection - only changes
+ # introduced by current branch are considered.
+ # All files are considered as added if there is no common ancestor with
+ # base branch or no previous commit.
+ # This option is ignored if action is triggered by pull_request event.
+ # Default: repository default branch (e.g. master)
+ base: ''
+
+ # How many commits are initially fetched from base branch.
+ # If needed, each subsequent fetch doubles the
+ # previously requested number of commits until the merge-base
+ # is found or there are no more commits in the history.
+ # This option takes effect only when changes are detected
+ # using git against base branch (feature branch workflow).
+  # Default: 10
+ initial-fetch-depth: ''
+
+ # Enables listing of files matching the filter:
+ # 'none' - Disables listing of matching files (default).
+ # 'json' - Matching files paths are formatted as JSON array.
+ # 'shell' - Matching files paths are escaped and space-delimited.
+ # Output is usable as command line argument list in linux shell.
+ # Default: none
+ list-files: ''
+
+ # Relative path under $GITHUB_WORKSPACE where the repository was checked out.
+ working-directory: ''
+
+ # Personal access token used to fetch list of changed files
+ # from Github REST API.
+ # It's used only if action is triggered by pull request event.
+ # Github token from workflow context is used as default value.
+ # If empty string is provided, action falls back to detect
+ # changes using git commands.
+ # Default: ${{ github.token }}
+ token: ''
+```
+
+## Outputs
+- For each filter it sets output variable named by the filter to the text:
+ - `'true'` - if **any** of changed files matches any of filter rules
+ - `'false'` - if **none** of changed files matches any of filter rules
+- If enabled, for each filter it sets output variable with name `${FILTER_NAME}_files`. It will contain list of all files matching the filter.
+
+# Examples
+
+## Conditional execution
+
+
+ Execute step in a workflow job only if some file in a subfolder is changed
+
```yaml
-on:
- push:
- branches:
- - master
- pull_request:
- branches:
- - master
jobs:
tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- - uses: dorny/paths-filter@v2.2.1
+ - uses: dorny/paths-filter@v2
id: filter
with:
- # inline YAML or path to separate file (e.g.: .github/filters.yaml)
filters: |
backend:
- - 'backend/**/*'
+ - 'backend/**'
frontend:
- - 'frontend/**/*'
+ - 'frontend/**'
# run only if 'backend' files were changed
- - name: backend unit tests
+ - name: backend tests
if: steps.filter.outputs.backend == 'true'
run: ...
# run only if 'frontend' files were changed
- - name: frontend unit tests
+ - name: frontend tests
if: steps.filter.outputs.frontend == 'true'
run: ...
@@ -82,16 +154,14 @@ jobs:
if: steps.filter.outputs.backend == 'true' || steps.filter.outputs.frontend == 'true'
run: ...
```
+
+
+
+ Execute job in a workflow only if some file in a subfolder is changed
-If your workflow uses multiple jobs, you can put *paths-filter* into own job and use
-[job outputs](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjobs_idoutputs)
-in other jobs [if](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idif) statements:
```yml
-on:
- pull_request:
- branches:
- - master
jobs:
+ # JOB to run change detection
changes:
runs-on: ubuntu-latest
# Set job outputs to values from filter step
@@ -100,45 +170,213 @@ jobs:
frontend: ${{ steps.filter.outputs.frontend }}
steps:
# For pull requests it's not necessary to checkout the code
- - uses: dorny/paths-filter@v2.2.1
+ - uses: dorny/paths-filter@v2
id: filter
with:
- # Filters stored in own yaml file
- filters: '.github/filters.yml'
+ filters: |
+ backend:
+ - 'backend/**'
+ frontend:
+ - 'frontend/**'
+
+ # JOB to build and test backend code
backend:
needs: changes
if: ${{ needs.changes.outputs.backend == 'true' }}
+ runs-on: ubuntu-latest
steps:
+ - uses: actions/checkout@v2
- ...
+
+ # JOB to build and test frontend code
frontend:
needs: changes
if: ${{ needs.changes.outputs.frontend == 'true' }}
+ runs-on: ubuntu-latest
steps:
+ - uses: actions/checkout@v2
- ...
```
+
+
+## Change detection workflows
+
+
+ Pull requests: Detect changes against PR base branch
+
+```yaml
+on:
+ pull_request:
+ branches: # PRs to following branches will trigger the workflow
+ - master
+ - develop
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: dorny/paths-filter@v2
+ id: filter
+ with:
+ filters: ... # Configure your filters
+```
+
+
+
+ Feature branch: Detect changes against configured base branch
+
+```yaml
+on:
+ push:
+ branches: # Push to following branches will trigger the workflow
+ - feature/**
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: dorny/paths-filter@v2
+ id: filter
+ with:
+ base: develop # Change detection against merge-base with this branch
+ filters: ... # Configure your filters
+```
+
+
+
+ Long lived branches: Detect changes against the most recent commit on the same branch before the push
+
+```yaml
+on:
+ push:
+ branches: # Push to following branches will trigger the workflow
+ - master
+ - develop
+ - release/**
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: dorny/paths-filter@v2
+ id: filter
+ with:
+ # Use context to get branch where commits were pushed.
+ # If there is only one long lived branch (e.g. master),
+ # you can specify it directly.
+ # If it's not configured, the repository default branch is used.
+ base: ${{ github.ref }}
+ filters: ... # Configure your filters
+```
+
+
+## Advanced options
+
+
+ Define filter rules in own file
+
+```yaml
+- uses: dorny/paths-filter@v2
+ id: filter
+ with:
+ # Path to file where filters are defined
+ filters: .github/filters.yaml
+```
+
+
+
+ Use YAML anchors to reuse path expression(s) inside another rule
+
+```yaml
+- uses: dorny/paths-filter@v2
+ id: filter
+ with:
+ # &shared is YAML anchor,
+ # *shared references previously defined anchor
+ # src filter will match any path under common, config and src folders
+ filters: |
+ shared: &shared
+ - common/**
+ - config/**
+ src:
+ - *shared
+ - src/**
+```
+
+
+
+ Consider if file was added, modified or deleted
+
+```yaml
+- uses: dorny/paths-filter@v2
+ id: filter
+ with:
+ # Changed file can be 'added', 'modified', or 'deleted'.
+ # By default the type of change is not considered.
+ # Optionally it's possible to specify it using nested
+ # dictionary, where type(s) of change composes the key.
+ # Multiple change types can be specified using `|` as delimiter.
+ filters: |
+ addedOrModified:
+ - added|modified: '**'
+ allChanges:
+ - added|deleted|modified: '**'
+```
+
+
+
+## Custom processing of changed files
+
+
+ Passing list of modified files as command line args in Linux shell
+
+```yaml
+- uses: dorny/paths-filter@v2
+ id: filter
+ with:
+ # Enable listing of files matching each filter.
+ # Paths to files will be available in `${FILTER_NAME}_files` output variable.
+ # Paths will be escaped and space-delimited.
+ # Output is usable as command line argument list in linux shell
+ list-files: shell
+
+ # In this example changed files will be checked by linter.
+ # It doesn't make sense to lint deleted files.
+ # Therefore we specify we are only interested in added or modified files.
+ filters: |
+ markdown:
+ - added|modified: '*.md'
+- name: Lint Markdown
+ if: ${{ steps.filter.outputs.markdown == 'true' }}
+ run: npx textlint ${{ steps.filter.outputs.markdown_files }}
+```
+
+
+
+ Passing list of modified files as JSON array to another action
+
+```yaml
+- uses: dorny/paths-filter@v2
+ id: filter
+ with:
+ # Enable listing of files matching each filter.
+ # Paths to files will be available in `${FILTER_NAME}_files` output variable.
+ # Paths will be formatted as JSON array
+ list-files: json
+
+ # In this example all changed files are passed to following action to do
+ # some custom processing.
+ filters: |
+ changed:
+ - '**'
+- name: Process changed files
+ uses: johndoe/some-action@v1
+ with:
+      files: ${{ steps.filter.outputs.changed_files }}
+```
+
+
+
+# License
-## How it works
-
-1. If action was triggered by pull request:
- - If access token was provided it's used to fetch list of changed files from Github API.
- - If access token was not provided, top of the base branch is fetched and changed files are detected using `git diff-index ` command.
-2. If action was triggered by push event
- - if *base* input parameter references same branch it was pushed to, most recent commit before the push is fetched
- - If *base* input parameter references other branch, top of that branch is fetched
- - changed files are detected using `git diff-index FETCH_HEAD` command.
-3. For each filter rule it checks if there is any matching file
-4. Output variables are set
-
-## Difference from similar projects:
-
-- [Has Changed Path](https://github.com/MarceloPrado/has-changed-path)
- - detects changes from previous commit
- - you have to configure `checkout` action to fetch some number of previous commits
- - outputs only single `true` / `false` value if any of provided paths contains changes
-- [Changed Files Exporter](https://github.com/futuratrepadeira/changed-files)
- - outputs lists with paths of created, updated and deleted files
- - output is not directly usable in the `if` clause
-- [Changed File Filter](https://github.com/tony84727/changed-file-filter)
- - allows change detection between any refs or commits
- - fetches whole history of your git repository
- - might have negative performance impact on big repositories (github by default fetches only single commit)
+The scripts and documentation in this project are released under the [MIT License](https://github.com/dorny/paths-filter/blob/master/LICENSE)
diff --git a/__tests__/filter.test.ts b/__tests__/filter.test.ts
index b9b7c44c..74b9c7c9 100644
--- a/__tests__/filter.test.ts
+++ b/__tests__/filter.test.ts
@@ -1,4 +1,5 @@
-import Filter from '../src/filter'
+import {Filter} from '../src/filter'
+import {File, ChangeStatus} from '../src/file'
describe('yaml filter parsing tests', () => {
test('throws if yaml is not a dictionary', () => {
@@ -6,14 +7,6 @@ describe('yaml filter parsing tests', () => {
const t = () => new Filter(yaml)
expect(t).toThrow(/^Invalid filter.*/)
})
- test('throws on invalid yaml', () => {
- const yaml = `
- src:
- src/**/*.js
- `
- const t = () => new Filter(yaml)
- expect(t).toThrow(/^Invalid filter.*/)
- })
test('throws if pattern is not a string', () => {
const yaml = `
src:
@@ -27,14 +20,24 @@ describe('yaml filter parsing tests', () => {
})
describe('matching tests', () => {
+ test('matches single inline rule', () => {
+ const yaml = `
+ src: "src/**/*.js"
+ `
+ let filter = new Filter(yaml)
+ const files = modified(['src/app/module/file.js'])
+ const match = filter.match(files)
+ expect(match.src).toEqual(files)
+ })
test('matches single rule in single group', () => {
const yaml = `
src:
- src/**/*.js
`
const filter = new Filter(yaml)
- const match = filter.match(['src/app/module/file.js'])
- expect(match.src).toBeTruthy()
+ const files = modified(['src/app/module/file.js'])
+ const match = filter.match(files)
+ expect(match.src).toEqual(files)
})
test('no match when file is in different folder', () => {
@@ -43,8 +46,8 @@ describe('matching tests', () => {
- src/**/*.js
`
const filter = new Filter(yaml)
- const match = filter.match(['not_src/other_file.js'])
- expect(match.src).toBeFalsy()
+ const match = filter.match(modified(['not_src/other_file.js']))
+ expect(match.src).toEqual([])
})
test('match only within second groups ', () => {
@@ -55,9 +58,10 @@ describe('matching tests', () => {
- test/**/*.js
`
const filter = new Filter(yaml)
- const match = filter.match(['test/test.js'])
- expect(match.src).toBeFalsy()
- expect(match.test).toBeTruthy()
+ const files = modified(['test/test.js'])
+ const match = filter.match(files)
+ expect(match.src).toEqual([])
+ expect(match.test).toEqual(files)
})
test('match only withing second rule of single group', () => {
@@ -67,18 +71,20 @@ describe('matching tests', () => {
- test/**/*.js
`
const filter = new Filter(yaml)
- const match = filter.match(['test/test.js'])
- expect(match.src).toBeTruthy()
+ const files = modified(['test/test.js'])
+ const match = filter.match(files)
+ expect(match.src).toEqual(files)
})
test('matches anything', () => {
const yaml = `
any:
- - "**/*"
+ - "**"
`
const filter = new Filter(yaml)
- const match = filter.match(['test/test.js'])
- expect(match.any).toBeTruthy()
+ const files = modified(['test/test.js'])
+ const match = filter.match(files)
+ expect(match.any).toEqual(files)
})
test('globbing matches path where file or folder name starts with dot', () => {
@@ -87,8 +93,9 @@ describe('matching tests', () => {
- "**/*.js"
`
const filter = new Filter(yaml)
- const match = filter.match(['.test/.test.js'])
- expect(match.dot).toBeTruthy()
+ const files = modified(['.test/.test.js'])
+ const match = filter.match(files)
+ expect(match.dot).toEqual(files)
})
test('matches path based on rules included using YAML anchor', () => {
@@ -100,8 +107,49 @@ describe('matching tests', () => {
- *shared
- src/**/*
`
+ const filter = new Filter(yaml)
+ const files = modified(['config/settings.yml'])
+ const match = filter.match(files)
+ expect(match.src).toEqual(files)
+ })
+})
+
+describe('matching specific change status', () => {
+ test('does not match modified file as added', () => {
+ const yaml = `
+ add:
+ - added: "**/*"
+ `
+ let filter = new Filter(yaml)
+ const match = filter.match(modified(['file.js']))
+ expect(match.add).toEqual([])
+ })
+
+ test('match added file as added', () => {
+ const yaml = `
+ add:
+ - added: "**/*"
+ `
let filter = new Filter(yaml)
- const match = filter.match(['config/settings.yml'])
- expect(match.src).toBeTruthy()
+ const files = [{status: ChangeStatus.Added, filename: 'file.js'}]
+ const match = filter.match(files)
+ expect(match.add).toEqual(files)
+ })
+
+ test('matches when multiple statuses are configured', () => {
+ const yaml = `
+ addOrModify:
+ - added|modified: "**/*"
+ `
+ let filter = new Filter(yaml)
+ const files = [{status: ChangeStatus.Modified, filename: 'file.js'}]
+ const match = filter.match(files)
+ expect(match.addOrModify).toEqual(files)
})
})
+
+function modified(paths: string[]): File[] {
+ return paths.map(filename => {
+ return {filename, status: ChangeStatus.Modified}
+ })
+}
diff --git a/__tests__/git.test.ts b/__tests__/git.test.ts
index b5c25b01..ce46d6e0 100644
--- a/__tests__/git.test.ts
+++ b/__tests__/git.test.ts
@@ -1,4 +1,20 @@
import * as git from '../src/git'
+import {ChangeStatus} from '../src/file'
+
+describe('parsing output of the git diff command', () => {
+ test('parseGitDiffOutput returns files with correct change status', async () => {
+ const files = git.parseGitDiffOutput(
+ 'A\u0000LICENSE\u0000' + 'M\u0000src/index.ts\u0000' + 'D\u0000src/main.ts\u0000'
+ )
+ expect(files.length).toBe(3)
+ expect(files[0].filename).toBe('LICENSE')
+ expect(files[0].status).toBe(ChangeStatus.Added)
+ expect(files[1].filename).toBe('src/index.ts')
+ expect(files[1].status).toBe(ChangeStatus.Modified)
+ expect(files[2].filename).toBe('src/main.ts')
+ expect(files[2].status).toBe(ChangeStatus.Deleted)
+ })
+})
describe('git utility function tests (those not invoking git)', () => {
test('Detects if ref references a tag', () => {
diff --git a/__tests__/shell-escape.test.ts b/__tests__/shell-escape.test.ts
new file mode 100644
index 00000000..232cdd7b
--- /dev/null
+++ b/__tests__/shell-escape.test.ts
@@ -0,0 +1,16 @@
+import shellEscape from '../src/shell-escape'
+
+test('simple path escaped', () => {
+ expect(shellEscape('file')).toBe("'file'")
+})
+
+test('path with space is wrapped with single quotes', () => {
+ expect(shellEscape('file with space')).toBe("'file with space'")
+})
+
+test('path with quote is divided into quoted segments and escaped quote', () => {
+ expect(shellEscape("file'with quote")).toBe("'file'\\''with quote'")
+})
+test('path with leading quote does not have double quotes at beginning', () => {
+ expect(shellEscape("'file-leading-quote")).toBe("\\''file-leading-quote'")
+})
diff --git a/action.yml b/action.yml
index 723e07eb..267d7355 100644
--- a/action.yml
+++ b/action.yml
@@ -6,6 +6,9 @@ inputs:
description: 'GitHub Access Token'
required: false
default: ${{ github.token }}
+ working-directory:
+ description: 'Relative path under $GITHUB_WORKSPACE where the repository was checked out.'
+ required: false
base:
description: |
Git reference (e.g. branch name) against which the changes will be detected. Defaults to repository default branch (e.g. master).
@@ -15,9 +18,25 @@ inputs:
filters:
description: 'Path to the configuration file or YAML string with filters definition'
required: true
+ list-files:
+ description: |
+ Enables listing of files matching the filter:
+ 'none' - Disables listing of matching files (default).
+ 'json' - Matching files paths are serialized as JSON array.
+ 'shell' - Matching files paths are escaped and space-delimited. Output is usable as command line argument list in linux shell.
+ required: true
+ default: none
+ initial-fetch-depth:
+ description: |
+ How many commits are initially fetched from base branch.
+ If needed, each subsequent fetch doubles the previously requested number of commits
+ until the merge-base is found or there are no more commits in the history.
+ This option takes effect only when changes are detected using git against different base branch.
+ required: false
+ default: '10'
runs:
using: 'node12'
main: 'dist/index.js'
branding:
color: blue
- icon: filter
\ No newline at end of file
+ icon: filter
diff --git a/dist/index.js b/dist/index.js
index 78e4d51c..418489df 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -3788,47 +3788,134 @@ module.exports = require("child_process");
"use strict";
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.trimRefsHeads = exports.trimRefs = exports.isTagRef = exports.getChangedFiles = exports.fetchCommit = exports.FETCH_HEAD = exports.NULL_SHA = void 0;
+exports.trimRefsHeads = exports.trimRefs = exports.isTagRef = exports.listAllFilesAsAdded = exports.parseGitDiffOutput = exports.getChangesSinceRef = exports.getChangesAgainstSha = exports.NULL_SHA = void 0;
const exec_1 = __webpack_require__(986);
+const core = __importStar(__webpack_require__(470));
+const file_1 = __webpack_require__(258);
exports.NULL_SHA = '0000000000000000000000000000000000000000';
-exports.FETCH_HEAD = 'FETCH_HEAD';
-function fetchCommit(ref) {
- return __awaiter(this, void 0, void 0, function* () {
- const exitCode = yield exec_1.exec('git', ['fetch', '--depth=1', '--no-tags', 'origin', ref]);
- if (exitCode !== 0) {
- throw new Error(`Fetching ${ref} failed`);
- }
- });
+async function getChangesAgainstSha(sha) {
+ // Fetch single commit
+ core.startGroup(`Fetching ${sha} from origin`);
+ await exec_1.exec('git', ['fetch', '--depth=1', '--no-tags', 'origin', sha]);
+ core.endGroup();
+ // Get differences between sha and HEAD
+ core.startGroup(`Change detection ${sha}..HEAD`);
+ let output = '';
+ try {
+ // Two dots '..' change detection - directly compares two versions
+ await exec_1.exec('git', ['diff', '--no-renames', '--name-status', '-z', `${sha}..HEAD`], {
+ listeners: {
+ stdout: (data) => (output += data.toString())
+ }
+ });
+ }
+ finally {
+ fixStdOutNullTermination();
+ core.endGroup();
+ }
+ return parseGitDiffOutput(output);
+}
+exports.getChangesAgainstSha = getChangesAgainstSha;
+async function getChangesSinceRef(ref, initialFetchDepth) {
+ // Fetch and add base branch
+ core.startGroup(`Fetching ${ref} from origin until merge-base is found`);
+ await exec_1.exec('git', ['fetch', `--depth=${initialFetchDepth}`, '--no-tags', 'origin', `${ref}:${ref}`]);
+ async function hasMergeBase() {
+ return (await exec_1.exec('git', ['merge-base', ref, 'HEAD'], { ignoreReturnCode: true })) === 0;
+ }
+ async function countCommits() {
+ return (await getNumberOfCommits('HEAD')) + (await getNumberOfCommits(ref));
+ }
+ // Fetch more commits until merge-base is found
+ if (!(await hasMergeBase())) {
+ let deepen = initialFetchDepth;
+ let lastCommitsCount = await countCommits();
+ do {
+ await exec_1.exec('git', ['fetch', `--deepen=${deepen}`, '--no-tags', '--no-auto-gc', '-q']);
+ const count = await countCommits();
+ if (count <= lastCommitsCount) {
+ core.info('No merge base found - all files will be listed as added');
+ core.endGroup();
+ return await listAllFilesAsAdded();
+ }
+ lastCommitsCount = count;
+ deepen = Math.min(deepen * 2, Number.MAX_SAFE_INTEGER);
+ } while (!(await hasMergeBase()));
+ }
+ core.endGroup();
+ // Get changes introduced on HEAD compared to ref
+ core.startGroup(`Change detection ${ref}...HEAD`);
+ let output = '';
+ try {
+ // Three dots '...' change detection - finds merge-base and compares against it
+ await exec_1.exec('git', ['diff', '--no-renames', '--name-status', '-z', `${ref}...HEAD`], {
+ listeners: {
+ stdout: (data) => (output += data.toString())
+ }
+ });
+ }
+ finally {
+ fixStdOutNullTermination();
+ core.endGroup();
+ }
+ return parseGitDiffOutput(output);
}
-exports.fetchCommit = fetchCommit;
-function getChangedFiles(ref) {
- return __awaiter(this, void 0, void 0, function* () {
- let output = '';
- const exitCode = yield exec_1.exec('git', ['diff-index', '--name-only', ref], {
+exports.getChangesSinceRef = getChangesSinceRef;
+function parseGitDiffOutput(output) {
+ const tokens = output.split('\u0000').filter(s => s.length > 0);
+ const files = [];
+ for (let i = 0; i + 1 < tokens.length; i += 2) {
+ files.push({
+ status: statusMap[tokens[i]],
+ filename: tokens[i + 1]
+ });
+ }
+ return files;
+}
+exports.parseGitDiffOutput = parseGitDiffOutput;
+async function listAllFilesAsAdded() {
+ core.startGroup('Listing all files tracked by git');
+ let output = '';
+ try {
+ await exec_1.exec('git', ['ls-files', '-z'], {
listeners: {
stdout: (data) => (output += data.toString())
}
});
- if (exitCode !== 0) {
- throw new Error(`Couldn't determine changed files`);
- }
- return output
- .split('\n')
- .map(s => s.trim())
- .filter(s => s.length > 0);
- });
+ }
+ finally {
+ fixStdOutNullTermination();
+ core.endGroup();
+ }
+ return output
+ .split('\u0000')
+ .filter(s => s.length > 0)
+ .map(path => ({
+ status: file_1.ChangeStatus.Added,
+ filename: path
+ }));
}
-exports.getChangedFiles = getChangedFiles;
+exports.listAllFilesAsAdded = listAllFilesAsAdded;
function isTagRef(ref) {
return ref.startsWith('refs/tags/');
}
@@ -3842,9 +3929,33 @@ function trimRefsHeads(ref) {
return trimStart(trimRef, 'heads/');
}
exports.trimRefsHeads = trimRefsHeads;
+async function getNumberOfCommits(ref) {
+ let output = '';
+ await exec_1.exec('git', ['rev-list', `--count`, ref], {
+ listeners: {
+ stdout: (data) => (output += data.toString())
+ }
+ });
+ const count = parseInt(output);
+ return isNaN(count) ? 0 : count;
+}
function trimStart(ref, start) {
return ref.startsWith(start) ? ref.substr(start.length) : ref;
}
+function fixStdOutNullTermination() {
+ // Previous command uses NULL as delimiters and output is printed to stdout.
+ // We have to make sure next thing written to stdout will start on new line.
+ // Otherwise things like ::set-output wouldn't work.
+ core.info('');
+}
+const statusMap = {
+ A: file_1.ChangeStatus.Added,
+ C: file_1.ChangeStatus.Copied,
+ D: file_1.ChangeStatus.Deleted,
+ M: file_1.ChangeStatus.Modified,
+ R: file_1.ChangeStatus.Renamed,
+ U: file_1.ChangeStatus.Unmerged
+};
/***/ }),
@@ -4479,15 +4590,6 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
@@ -4495,34 +4597,34 @@ Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(__webpack_require__(747));
const core = __importStar(__webpack_require__(470));
const github = __importStar(__webpack_require__(469));
-const filter_1 = __importDefault(__webpack_require__(235));
+const filter_1 = __webpack_require__(235);
+const file_1 = __webpack_require__(258);
const git = __importStar(__webpack_require__(136));
-function run() {
- return __awaiter(this, void 0, void 0, function* () {
- try {
- const token = core.getInput('token', { required: false });
- const filtersInput = core.getInput('filters', { required: true });
- const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput;
- const filter = new filter_1.default(filtersYaml);
- const files = yield getChangedFiles(token);
- if (files === null) {
- // Change detection was not possible
- // Set all filter keys to true (i.e. changed)
- for (const key in filter.rules) {
- core.setOutput(key, String(true));
- }
- }
- else {
- const result = filter.match(files);
- for (const key in result) {
- core.setOutput(key, String(result[key]));
- }
- }
- }
- catch (error) {
- core.setFailed(error.message);
+const shell_escape_1 = __importDefault(__webpack_require__(751));
+async function run() {
+ try {
+ const workingDirectory = core.getInput('working-directory', { required: false });
+ if (workingDirectory) {
+ process.chdir(workingDirectory);
+ }
+ const token = core.getInput('token', { required: false });
+ const base = core.getInput('base', { required: false });
+ const filtersInput = core.getInput('filters', { required: true });
+ const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput;
+ const listFiles = core.getInput('list-files', { required: false }).toLowerCase() || 'none';
+ const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth', { required: false })) || 10;
+ if (!isExportFormat(listFiles)) {
+ core.setFailed(`Input parameter 'list-files' is set to invalid value '${listFiles}'`);
+ return;
}
- });
+ const filter = new filter_1.Filter(filtersYaml);
+ const files = await getChangedFiles(token, base, initialFetchDepth);
+ const results = filter.match(files);
+ exportResults(results, listFiles);
+ }
+ catch (error) {
+ core.setFailed(error.message);
+ }
}
function isPathInput(text) {
return !text.includes('\n');
@@ -4536,70 +4638,116 @@ function getConfigFileContent(configPath) {
}
return fs.readFileSync(configPath, { encoding: 'utf8' });
}
-function getChangedFiles(token) {
- return __awaiter(this, void 0, void 0, function* () {
- if (github.context.eventName === 'pull_request' || github.context.eventName === 'pull_request_target') {
- const pr = github.context.payload.pull_request;
- return token ? yield getChangedFilesFromApi(token, pr) : yield getChangedFilesFromGit(pr.base.sha);
+async function getChangedFiles(token, base, initialFetchDepth) {
+ if (github.context.eventName === 'pull_request' || github.context.eventName === 'pull_request_target') {
+ const pr = github.context.payload.pull_request;
+ return token
+ ? await getChangedFilesFromApi(token, pr)
+ : await git.getChangesSinceRef(pr.base.ref, initialFetchDepth);
+ }
+ else if (github.context.eventName === 'push') {
+ return getChangedFilesFromPush(base, initialFetchDepth);
+ }
+ else {
+ throw new Error('This action can be triggered only by pull_request, pull_request_target or push event');
+ }
+}
+async function getChangedFilesFromPush(base, initialFetchDepth) {
+ const push = github.context.payload;
+ // No change detection for pushed tags
+ if (git.isTagRef(push.ref)) {
+ core.info('Workflow is triggered by pushing of tag - all files will be listed as added');
+ return await git.listAllFilesAsAdded();
+ }
+ const baseRef = git.trimRefsHeads(base || push.repository.default_branch);
+ const pushRef = git.trimRefsHeads(push.ref);
+ // If base references same branch it was pushed to, we will do comparison against the previously pushed commit.
+ if (baseRef === pushRef) {
+ if (push.before === git.NULL_SHA) {
+ core.info('First push of a branch detected - all files will be listed as added');
+ return await git.listAllFilesAsAdded();
+ }
+ core.info(`Changes will be detected against the last previously pushed commit on same branch (${pushRef})`);
+ return await git.getChangesAgainstSha(push.before);
+ }
+ // Changes introduced by current branch against the base branch
+ core.info(`Changes will be detected against the branch ${baseRef}`);
+ return await git.getChangesSinceRef(baseRef, initialFetchDepth);
+}
+// Uses github REST api to get list of files changed in PR
+async function getChangedFilesFromApi(token, pullRequest) {
+ core.info(`Fetching list of changed files for PR#${pullRequest.number} from Github API`);
+ const client = new github.GitHub(token);
+ const pageSize = 100;
+ const files = [];
+ for (let page = 0; page * pageSize < pullRequest.changed_files; page++) {
+ const response = await client.pulls.listFiles({
+ owner: github.context.repo.owner,
+ repo: github.context.repo.repo,
+ pull_number: pullRequest.number,
+ page,
+ per_page: pageSize
+ });
+ for (const row of response.data) {
+ // There's no obvious use-case for detection of renames
+ // Therefore we treat it as if rename detection in git diff was turned off.
+ // Rename is replaced by delete of original filename and add of new filename
+ if (row.status === file_1.ChangeStatus.Renamed) {
+ files.push({
+ filename: row.filename,
+ status: file_1.ChangeStatus.Added
+ });
+ files.push({
+ // 'previous_filename' for some unknown reason isn't in the type definition or documentation
+ filename: row.previous_filename,
+ status: file_1.ChangeStatus.Deleted
+ });
+ }
+ else {
+ files.push({
+ filename: row.filename,
+ status: row.status
+ });
+ }
}
- else if (github.context.eventName === 'push') {
- return getChangedFilesFromPush();
+ }
+ return files;
+}
+function exportResults(results, format) {
+ core.info('Results:');
+ for (const [key, files] of Object.entries(results)) {
+ const value = files.length > 0;
+ core.startGroup(`Filter ${key} = ${value}`);
+ if (files.length > 0) {
+ core.info('Matching files:');
+ for (const file of files) {
+ core.info(`${file.filename} [${file.status}]`);
+ }
}
else {
- throw new Error('This action can be triggered only by pull_request or push event');
+ core.info('Matching files: none');
}
- });
+ core.setOutput(key, value);
+ if (format !== 'none') {
+ const filesValue = serializeExport(files, format);
+ core.setOutput(`${key}_files`, filesValue);
+ }
+ }
+ core.endGroup();
}
-function getChangedFilesFromPush() {
- return __awaiter(this, void 0, void 0, function* () {
- const push = github.context.payload;
- // No change detection for pushed tags
- if (git.isTagRef(push.ref))
- return null;
- // Get base from input or use repo default branch.
- // It it starts with 'refs/', it will be trimmed (git fetch refs/heads/ doesn't work)
- const baseInput = git.trimRefs(core.getInput('base', { required: false }) || push.repository.default_branch);
- // If base references same branch it was pushed to, we will do comparison against the previously pushed commit.
- // Otherwise changes are detected against the base reference
- const base = git.trimRefsHeads(baseInput) === git.trimRefsHeads(push.ref) ? push.before : baseInput;
- // There is no previous commit for comparison
- // e.g. change detection against previous commit of just pushed new branch
- if (base === git.NULL_SHA)
- return null;
- return yield getChangedFilesFromGit(base);
- });
+function serializeExport(files, format) {
+ const fileNames = files.map(file => file.filename);
+ switch (format) {
+ case 'json':
+ return JSON.stringify(fileNames);
+ case 'shell':
+ return fileNames.map(shell_escape_1.default).join(' ');
+ default:
+ return '';
+ }
}
-// Fetch base branch and use `git diff` to determine changed files
-function getChangedFilesFromGit(ref) {
- return __awaiter(this, void 0, void 0, function* () {
- core.debug('Fetching base branch and using `git diff-index` to determine changed files');
- yield git.fetchCommit(ref);
- // FETCH_HEAD will always point to the just fetched commit
- // No matter if ref is SHA, branch or tag name or full git ref
- return yield git.getChangedFiles(git.FETCH_HEAD);
- });
-}
-// Uses github REST api to get list of files changed in PR
-function getChangedFilesFromApi(token, pullRequest) {
- return __awaiter(this, void 0, void 0, function* () {
- core.debug('Fetching list of modified files from Github API');
- const client = new github.GitHub(token);
- const pageSize = 100;
- const files = [];
- for (let page = 0; page * pageSize < pullRequest.changed_files; page++) {
- const response = yield client.pulls.listFiles({
- owner: github.context.repo.owner,
- repo: github.context.repo.repo,
- pull_number: pullRequest.number,
- page,
- per_page: pageSize
- });
- for (const row of response.data) {
- files.push(row.filename);
- }
- }
- return files;
- });
+function isExportFormat(value) {
+ return value === 'none' || value === 'shell' || value === 'json';
}
run();
@@ -4688,51 +4836,100 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
+exports.Filter = void 0;
const jsyaml = __importStar(__webpack_require__(414));
const minimatch = __importStar(__webpack_require__(595));
+// Minimatch options used in all matchers
+const MinimatchOptions = {
+ dot: true
+};
class Filter {
+ // Creates instance of Filter and load rules from YAML if it's provided
constructor(yaml) {
this.rules = {};
+ if (yaml) {
+ this.load(yaml);
+ }
+ }
+ // Load rules from YAML string
+ load(yaml) {
+ if (!yaml) {
+ return;
+ }
const doc = jsyaml.safeLoad(yaml);
if (typeof doc !== 'object') {
- this.throwInvalidFormatError();
+ this.throwInvalidFormatError('Root element is not an object');
}
- const opts = {
- dot: true
- };
- for (const name of Object.keys(doc)) {
- const patternsNode = doc[name];
- if (!Array.isArray(patternsNode)) {
- this.throwInvalidFormatError();
- }
- const patterns = flat(patternsNode);
- if (!patterns.every(x => typeof x === 'string')) {
- this.throwInvalidFormatError();
- }
- this.rules[name] = patterns.map(x => new minimatch.Minimatch(x, opts));
+ for (const [key, item] of Object.entries(doc)) {
+ this.rules[key] = this.parseFilterItemYaml(item);
}
}
- // Returns dictionary with match result per rules group
- match(paths) {
+ match(files) {
const result = {};
for (const [key, patterns] of Object.entries(this.rules)) {
- const match = paths.some(fileName => patterns.some(rule => rule.match(fileName)));
- result[key] = match;
+ result[key] = files.filter(file => this.isMatch(file, patterns));
}
return result;
}
- throwInvalidFormatError() {
- throw new Error('Invalid filter YAML format: Expected dictionary of string arrays');
+ isMatch(file, patterns) {
+ return patterns.some(rule => (rule.status === undefined || rule.status.includes(file.status)) && rule.matcher.match(file.filename));
+ }
+ parseFilterItemYaml(item) {
+ if (Array.isArray(item)) {
+ return flat(item.map(i => this.parseFilterItemYaml(i)));
+ }
+ if (typeof item === 'string') {
+ return [{ status: undefined, matcher: new minimatch.Minimatch(item, MinimatchOptions) }];
+ }
+ if (typeof item === 'object') {
+ return Object.entries(item).map(([key, pattern]) => {
+ if (typeof key !== 'string' || typeof pattern !== 'string') {
+ this.throwInvalidFormatError(`Expected [key:string]= pattern:string, but [${key}:${typeof key}]= ${pattern}:${typeof pattern} found`);
+ }
+ return {
+ status: key
+ .split('|')
+ .map(x => x.trim())
+ .filter(x => x.length > 0)
+ .map(x => x.toLowerCase()),
+ matcher: new minimatch.Minimatch(pattern, MinimatchOptions)
+ };
+ });
+ }
+ this.throwInvalidFormatError(`Unexpected element type '${typeof item}'`);
+ }
+ throwInvalidFormatError(message) {
+ throw new Error(`Invalid filter YAML format: ${message}.`);
}
}
-exports.default = Filter;
-// Creates a new array with all sub-array elements recursively concatenated
+exports.Filter = Filter;
+// Creates a new array with all sub-array elements concatenated
// In future could be replaced by Array.prototype.flat (supported on Node.js 11+)
function flat(arr) {
- return arr.reduce((acc, val) => acc.concat(Array.isArray(val) ? flat(val) : val), []);
+ return arr.reduce((acc, val) => acc.concat(val), []);
}
+/***/ }),
+
+/***/ 258:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ChangeStatus = void 0;
+var ChangeStatus;
+(function (ChangeStatus) {
+ ChangeStatus["Added"] = "added";
+ ChangeStatus["Copied"] = "copied";
+ ChangeStatus["Deleted"] = "deleted";
+ ChangeStatus["Modified"] = "modified";
+ ChangeStatus["Renamed"] = "renamed";
+ ChangeStatus["Unmerged"] = "unmerged";
+})(ChangeStatus = exports.ChangeStatus || (exports.ChangeStatus = {}));
+
+
/***/ }),
/***/ 260:
@@ -15144,6 +15341,23 @@ function sync (path, options) {
module.exports = require("fs");
+/***/ }),
+
+/***/ 751:
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+// Credits to https://github.com/xxorax/node-shell-escape
+Object.defineProperty(exports, "__esModule", { value: true });
+function shellEscape(value) {
+ return `'${value.replace(/'/g, "'\\''")}'`
+ .replace(/^(?:'')+/g, '') // unduplicate single-quote at the beginning
+    .replace(/\\'''/g, "\\'"); // remove non-escaped single-quote if they are enclosed between 2 escaped
+}
+exports.default = shellEscape;
+
+
/***/ }),
/***/ 753:
diff --git a/src/file.ts b/src/file.ts
new file mode 100644
index 00000000..d8125a7a
--- /dev/null
+++ b/src/file.ts
@@ -0,0 +1,13 @@
+export interface File {
+ filename: string
+ status: ChangeStatus
+}
+
+export enum ChangeStatus {
+ Added = 'added',
+ Copied = 'copied',
+ Deleted = 'deleted',
+ Modified = 'modified',
+ Renamed = 'renamed',
+ Unmerged = 'unmerged'
+}
diff --git a/src/filter.ts b/src/filter.ts
index 1f81d752..c6070bdf 100644
--- a/src/filter.ts
+++ b/src/filter.ts
@@ -1,49 +1,109 @@
import * as jsyaml from 'js-yaml'
import * as minimatch from 'minimatch'
+import {File, ChangeStatus} from './file'
-export default class Filter {
- rules: {[key: string]: minimatch.IMinimatch[]} = {}
+// Type definition of object we expect to load from YAML
+interface FilterYaml {
+ [name: string]: FilterItemYaml
+}
+type FilterItemYaml =
+ | string // Filename pattern, e.g. "path/to/*.js"
+ | {[changeTypes: string]: string} // Change status and filename, e.g. added|modified: "path/to/*.js"
+ | FilterItemYaml[] // Supports referencing another rule via YAML anchor
- constructor(yaml: string) {
- const doc = jsyaml.safeLoad(yaml)
- if (typeof doc !== 'object') {
- this.throwInvalidFormatError()
+// Minimatch options used in all matchers
+const MinimatchOptions: minimatch.IOptions = {
+ dot: true
+}
+
+// Internal representation of one item in named filter rule
+// Created as simplified form of data in FilterItemYaml
+interface FilterRuleItem {
+ status?: ChangeStatus[] // Required change status of the matched files
+ matcher: minimatch.IMinimatch // Matches the filename
+}
+
+export interface FilterResults {
+ [key: string]: File[]
+}
+
+export class Filter {
+ rules: {[key: string]: FilterRuleItem[]} = {}
+
+ // Creates instance of Filter and load rules from YAML if it's provided
+ constructor(yaml?: string) {
+ if (yaml) {
+ this.load(yaml)
}
+ }
- const opts: minimatch.IOptions = {
- dot: true
+ // Load rules from YAML string
+ load(yaml: string): void {
+ if (!yaml) {
+ return
}
- for (const name of Object.keys(doc)) {
- const patternsNode = doc[name]
- if (!Array.isArray(patternsNode)) {
- this.throwInvalidFormatError()
- }
- const patterns = flat(patternsNode) as string[]
- if (!patterns.every(x => typeof x === 'string')) {
- this.throwInvalidFormatError()
- }
- this.rules[name] = patterns.map(x => new minimatch.Minimatch(x, opts))
+ const doc = jsyaml.safeLoad(yaml) as FilterYaml
+ if (typeof doc !== 'object') {
+ this.throwInvalidFormatError('Root element is not an object')
+ }
+
+ for (const [key, item] of Object.entries(doc)) {
+ this.rules[key] = this.parseFilterItemYaml(item)
}
}
- // Returns dictionary with match result per rules group
- match(paths: string[]): {[key: string]: boolean} {
- const result: {[key: string]: boolean} = {}
+ match(files: File[]): FilterResults {
+ const result: FilterResults = {}
for (const [key, patterns] of Object.entries(this.rules)) {
- const match = paths.some(fileName => patterns.some(rule => rule.match(fileName)))
- result[key] = match
+ result[key] = files.filter(file => this.isMatch(file, patterns))
}
return result
}
- private throwInvalidFormatError(): never {
- throw new Error('Invalid filter YAML format: Expected dictionary of string arrays')
+ private isMatch(file: File, patterns: FilterRuleItem[]): boolean {
+ return patterns.some(
+ rule => (rule.status === undefined || rule.status.includes(file.status)) && rule.matcher.match(file.filename)
+ )
+ }
+
+ private parseFilterItemYaml(item: FilterItemYaml): FilterRuleItem[] {
+ if (Array.isArray(item)) {
+ return flat(item.map(i => this.parseFilterItemYaml(i)))
+ }
+
+ if (typeof item === 'string') {
+ return [{status: undefined, matcher: new minimatch.Minimatch(item, MinimatchOptions)}]
+ }
+
+ if (typeof item === 'object') {
+ return Object.entries(item).map(([key, pattern]) => {
+ if (typeof key !== 'string' || typeof pattern !== 'string') {
+ this.throwInvalidFormatError(
+ `Expected [key:string]= pattern:string, but [${key}:${typeof key}]= ${pattern}:${typeof pattern} found`
+ )
+ }
+ return {
+ status: key
+ .split('|')
+ .map(x => x.trim())
+ .filter(x => x.length > 0)
+ .map(x => x.toLowerCase()) as ChangeStatus[],
+ matcher: new minimatch.Minimatch(pattern, MinimatchOptions)
+ }
+ })
+ }
+
+ this.throwInvalidFormatError(`Unexpected element type '${typeof item}'`)
+ }
+
+ private throwInvalidFormatError(message: string): never {
+ throw new Error(`Invalid filter YAML format: ${message}.`)
}
}
-// Creates a new array with all sub-array elements recursively concatenated
+// Creates a new array with all sub-array elements concatenated
// In future could be replaced by Array.prototype.flat (supported on Node.js 11+)
-function flat(arr: any[]): any[] {
- return arr.reduce((acc, val) => acc.concat(Array.isArray(val) ? flat(val) : val), [])
+function flat<T>(arr: T[][]): T[] {
+ return arr.reduce((acc, val) => acc.concat(val), [])
}
diff --git a/src/git.ts b/src/git.ts
index 25829e11..831ab6ac 100644
--- a/src/git.ts
+++ b/src/git.ts
@@ -1,31 +1,115 @@
import {exec} from '@actions/exec'
+import * as core from '@actions/core'
+import {File, ChangeStatus} from './file'
export const NULL_SHA = '0000000000000000000000000000000000000000'
-export const FETCH_HEAD = 'FETCH_HEAD'
-export async function fetchCommit(ref: string): Promise<void> {
- const exitCode = await exec('git', ['fetch', '--depth=1', '--no-tags', 'origin', ref])
- if (exitCode !== 0) {
- throw new Error(`Fetching ${ref} failed`)
+export async function getChangesAgainstSha(sha: string): Promise<File[]> {
+ // Fetch single commit
+ core.startGroup(`Fetching ${sha} from origin`)
+ await exec('git', ['fetch', '--depth=1', '--no-tags', 'origin', sha])
+ core.endGroup()
+
+ // Get differences between sha and HEAD
+ core.startGroup(`Change detection ${sha}..HEAD`)
+ let output = ''
+ try {
+ // Two dots '..' change detection - directly compares two versions
+ await exec('git', ['diff', '--no-renames', '--name-status', '-z', `${sha}..HEAD`], {
+ listeners: {
+ stdout: (data: Buffer) => (output += data.toString())
+ }
+ })
+ } finally {
+ fixStdOutNullTermination()
+ core.endGroup()
}
+
+ return parseGitDiffOutput(output)
}
-export async function getChangedFiles(ref: string): Promise<string[]> {
+export async function getChangesSinceRef(ref: string, initialFetchDepth: number): Promise<File[]> {
+ // Fetch and add base branch
+ core.startGroup(`Fetching ${ref} from origin until merge-base is found`)
+ await exec('git', ['fetch', `--depth=${initialFetchDepth}`, '--no-tags', 'origin', `${ref}:${ref}`])
+
+  async function hasMergeBase(): Promise<boolean> {
+ return (await exec('git', ['merge-base', ref, 'HEAD'], {ignoreReturnCode: true})) === 0
+ }
+
+  async function countCommits(): Promise<number> {
+ return (await getNumberOfCommits('HEAD')) + (await getNumberOfCommits(ref))
+ }
+
+ // Fetch more commits until merge-base is found
+ if (!(await hasMergeBase())) {
+ let deepen = initialFetchDepth
+ let lastCommitsCount = await countCommits()
+ do {
+ await exec('git', ['fetch', `--deepen=${deepen}`, '--no-tags', '--no-auto-gc', '-q'])
+ const count = await countCommits()
+ if (count <= lastCommitsCount) {
+ core.info('No merge base found - all files will be listed as added')
+ core.endGroup()
+ return await listAllFilesAsAdded()
+ }
+ lastCommitsCount = count
+ deepen = Math.min(deepen * 2, Number.MAX_SAFE_INTEGER)
+ } while (!(await hasMergeBase()))
+ }
+ core.endGroup()
+
+ // Get changes introduced on HEAD compared to ref
+ core.startGroup(`Change detection ${ref}...HEAD`)
let output = ''
- const exitCode = await exec('git', ['diff-index', '--name-only', ref], {
- listeners: {
- stdout: (data: Buffer) => (output += data.toString())
- }
- })
+ try {
+ // Three dots '...' change detection - finds merge-base and compares against it
+ await exec('git', ['diff', '--no-renames', '--name-status', '-z', `${ref}...HEAD`], {
+ listeners: {
+ stdout: (data: Buffer) => (output += data.toString())
+ }
+ })
+ } finally {
+ fixStdOutNullTermination()
+ core.endGroup()
+ }
+
+ return parseGitDiffOutput(output)
+}
+
+export function parseGitDiffOutput(output: string): File[] {
+ const tokens = output.split('\u0000').filter(s => s.length > 0)
+ const files: File[] = []
+ for (let i = 0; i + 1 < tokens.length; i += 2) {
+ files.push({
+ status: statusMap[tokens[i]],
+ filename: tokens[i + 1]
+ })
+ }
+ return files
+}
- if (exitCode !== 0) {
- throw new Error(`Couldn't determine changed files`)
+export async function listAllFilesAsAdded(): Promise<File[]> {
+ core.startGroup('Listing all files tracked by git')
+ let output = ''
+ try {
+ await exec('git', ['ls-files', '-z'], {
+ listeners: {
+ stdout: (data: Buffer) => (output += data.toString())
+ }
+ })
+ } finally {
+ fixStdOutNullTermination()
+ core.endGroup()
}
return output
- .split('\n')
- .map(s => s.trim())
+ .split('\u0000')
.filter(s => s.length > 0)
+ .map(path => ({
+ status: ChangeStatus.Added,
+ filename: path
+ }))
}
export function isTagRef(ref: string): boolean {
@@ -41,6 +125,33 @@ export function trimRefsHeads(ref: string): string {
return trimStart(trimRef, 'heads/')
}
+async function getNumberOfCommits(ref: string): Promise<number> {
+ let output = ''
+ await exec('git', ['rev-list', `--count`, ref], {
+ listeners: {
+ stdout: (data: Buffer) => (output += data.toString())
+ }
+ })
+ const count = parseInt(output)
+ return isNaN(count) ? 0 : count
+}
+
function trimStart(ref: string, start: string): string {
return ref.startsWith(start) ? ref.substr(start.length) : ref
}
+
+function fixStdOutNullTermination(): void {
+ // Previous command uses NULL as delimiters and output is printed to stdout.
+ // We have to make sure next thing written to stdout will start on new line.
+ // Otherwise things like ::set-output wouldn't work.
+ core.info('')
+}
+
+const statusMap: {[char: string]: ChangeStatus} = {
+ A: ChangeStatus.Added,
+ C: ChangeStatus.Copied,
+ D: ChangeStatus.Deleted,
+ M: ChangeStatus.Modified,
+ R: ChangeStatus.Renamed,
+ U: ChangeStatus.Unmerged
+}
diff --git a/src/main.ts b/src/main.ts
index a8c4030a..f2a20d3a 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -3,30 +3,36 @@ import * as core from '@actions/core'
import * as github from '@actions/github'
import {Webhooks} from '@octokit/webhooks'
-import Filter from './filter'
+import {Filter, FilterResults} from './filter'
+import {File, ChangeStatus} from './file'
import * as git from './git'
+import shellEscape from './shell-escape'
+
+type ExportFormat = 'none' | 'json' | 'shell'
 async function run(): Promise<void> {
try {
+ const workingDirectory = core.getInput('working-directory', {required: false})
+ if (workingDirectory) {
+ process.chdir(workingDirectory)
+ }
+
const token = core.getInput('token', {required: false})
+ const base = core.getInput('base', {required: false})
const filtersInput = core.getInput('filters', {required: true})
const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput
+ const listFiles = core.getInput('list-files', {required: false}).toLowerCase() || 'none'
+ const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth', {required: false})) || 10
- const filter = new Filter(filtersYaml)
- const files = await getChangedFiles(token)
-
- if (files === null) {
- // Change detection was not possible
- // Set all filter keys to true (i.e. changed)
- for (const key in filter.rules) {
- core.setOutput(key, String(true))
- }
- } else {
- const result = filter.match(files)
- for (const key in result) {
- core.setOutput(key, String(result[key]))
- }
+ if (!isExportFormat(listFiles)) {
+ core.setFailed(`Input parameter 'list-files' is set to invalid value '${listFiles}'`)
+ return
}
+
+ const filter = new Filter(filtersYaml)
+ const files = await getChangedFiles(token, base, initialFetchDepth)
+ const results = filter.match(files)
+ exportResults(results, listFiles)
} catch (error) {
core.setFailed(error.message)
}
@@ -48,56 +54,56 @@ function getConfigFileContent(configPath: string): string {
return fs.readFileSync(configPath, {encoding: 'utf8'})
}
-async function getChangedFiles(token: string): Promise<string[] | null> {
+async function getChangedFiles(token: string, base: string, initialFetchDepth: number): Promise<File[]> {
if (github.context.eventName === 'pull_request' || github.context.eventName === 'pull_request_target') {
const pr = github.context.payload.pull_request as Webhooks.WebhookPayloadPullRequestPullRequest
- return token ? await getChangedFilesFromApi(token, pr) : await getChangedFilesFromGit(pr.base.sha)
+ return token
+ ? await getChangedFilesFromApi(token, pr)
+ : await git.getChangesSinceRef(pr.base.ref, initialFetchDepth)
} else if (github.context.eventName === 'push') {
- return getChangedFilesFromPush()
+ return getChangedFilesFromPush(base, initialFetchDepth)
} else {
- throw new Error('This action can be triggered only by pull_request or push event')
+ throw new Error('This action can be triggered only by pull_request, pull_request_target or push event')
}
}
-async function getChangedFilesFromPush(): Promise<string[] | null> {
+async function getChangedFilesFromPush(base: string, initialFetchDepth: number): Promise<File[]> {
const push = github.context.payload as Webhooks.WebhookPayloadPush
// No change detection for pushed tags
- if (git.isTagRef(push.ref)) return null
+ if (git.isTagRef(push.ref)) {
+ core.info('Workflow is triggered by pushing of tag - all files will be listed as added')
+ return await git.listAllFilesAsAdded()
+ }
- // Get base from input or use repo default branch.
- // It it starts with 'refs/', it will be trimmed (git fetch refs/heads/ doesn't work)
- const baseInput = git.trimRefs(core.getInput('base', {required: false}) || push.repository.default_branch)
+ const baseRef = git.trimRefsHeads(base || push.repository.default_branch)
+ const pushRef = git.trimRefsHeads(push.ref)
// If base references same branch it was pushed to, we will do comparison against the previously pushed commit.
- // Otherwise changes are detected against the base reference
- const base = git.trimRefsHeads(baseInput) === git.trimRefsHeads(push.ref) ? push.before : baseInput
-
- // There is no previous commit for comparison
- // e.g. change detection against previous commit of just pushed new branch
- if (base === git.NULL_SHA) return null
+ if (baseRef === pushRef) {
+ if (push.before === git.NULL_SHA) {
+ core.info('First push of a branch detected - all files will be listed as added')
+ return await git.listAllFilesAsAdded()
+ }
- return await getChangedFilesFromGit(base)
-}
+ core.info(`Changes will be detected against the last previously pushed commit on same branch (${pushRef})`)
+ return await git.getChangesAgainstSha(push.before)
+ }
-// Fetch base branch and use `git diff` to determine changed files
-async function getChangedFilesFromGit(ref: string): Promise<string[]> {
- core.debug('Fetching base branch and using `git diff-index` to determine changed files')
- await git.fetchCommit(ref)
- // FETCH_HEAD will always point to the just fetched commit
- // No matter if ref is SHA, branch or tag name or full git ref
- return await git.getChangedFiles(git.FETCH_HEAD)
+ // Changes introduced by current branch against the base branch
+ core.info(`Changes will be detected against the branch ${baseRef}`)
+ return await git.getChangesSinceRef(baseRef, initialFetchDepth)
}
// Uses github REST api to get list of files changed in PR
async function getChangedFilesFromApi(
token: string,
pullRequest: Webhooks.WebhookPayloadPullRequestPullRequest
-): Promise<string[]> {
- core.debug('Fetching list of modified files from Github API')
+): Promise<File[]> {
+ core.info(`Fetching list of changed files for PR#${pullRequest.number} from Github API`)
const client = new github.GitHub(token)
const pageSize = 100
- const files: string[] = []
+ const files: File[] = []
for (let page = 0; page * pageSize < pullRequest.changed_files; page++) {
const response = await client.pulls.listFiles({
owner: github.context.repo.owner,
@@ -107,11 +113,68 @@ async function getChangedFilesFromApi(
per_page: pageSize
})
for (const row of response.data) {
- files.push(row.filename)
+ // There's no obvious use-case for detection of renames
+ // Therefore we treat it as if rename detection in git diff was turned off.
+ // Rename is replaced by delete of original filename and add of new filename
+ if (row.status === ChangeStatus.Renamed) {
+ files.push({
+ filename: row.filename,
+ status: ChangeStatus.Added
+ })
+ files.push({
+ // 'previous_filename' for some unknown reason isn't in the type definition or documentation
+ filename: (<any>row).previous_filename as string,
+ status: ChangeStatus.Deleted
+ })
+ } else {
+ files.push({
+ filename: row.filename,
+ status: row.status as ChangeStatus
+ })
+ }
}
}
return files
}
+function exportResults(results: FilterResults, format: ExportFormat): void {
+ core.info('Results:')
+ for (const [key, files] of Object.entries(results)) {
+ const value = files.length > 0
+ core.startGroup(`Filter ${key} = ${value}`)
+ if (files.length > 0) {
+ core.info('Matching files:')
+ for (const file of files) {
+ core.info(`${file.filename} [${file.status}]`)
+ }
+ } else {
+ core.info('Matching files: none')
+ }
+
+ core.setOutput(key, value)
+ if (format !== 'none') {
+ const filesValue = serializeExport(files, format)
+ core.setOutput(`${key}_files`, filesValue)
+ }
+ }
+ core.endGroup()
+}
+
+function serializeExport(files: File[], format: ExportFormat): string {
+ const fileNames = files.map(file => file.filename)
+ switch (format) {
+ case 'json':
+ return JSON.stringify(fileNames)
+ case 'shell':
+ return fileNames.map(shellEscape).join(' ')
+ default:
+ return ''
+ }
+}
+
+function isExportFormat(value: string): value is ExportFormat {
+ return value === 'none' || value === 'shell' || value === 'json'
+}
+
run()
diff --git a/src/shell-escape.ts b/src/shell-escape.ts
new file mode 100644
index 00000000..6dfe46d1
--- /dev/null
+++ b/src/shell-escape.ts
@@ -0,0 +1,7 @@
+// Credits to https://github.com/xxorax/node-shell-escape
+
+export default function shellEscape(value: string): string {
+ return `'${value.replace(/'/g, "'\\''")}'`
+ .replace(/^(?:'')+/g, '') // deduplicate single quotes at the beginning
+ .replace(/\\'''/g, "\\'") // remove a non-escaped single quote when it is enclosed between two escaped ones
+}
diff --git a/tsconfig.json b/tsconfig.json
index f6e7cb5b..fa74d553 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -1,6 +1,6 @@
{
"compilerOptions": {
- "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
+ "target": "es2019", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"outDir": "./lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */