This commit is contained in:
Diogo Kiss 2025-11-27 10:27:00 +01:00 committed by GitHub
commit 8fcccff3c1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 91 additions and 18 deletions

View File

@ -142,6 +142,13 @@ For more information, see [CHANGELOG](https://github.com/dorny/paths-filter/blob
# Default: none
list-files: ''
# Enables writing the lists of matching files to a corresponding file.
# If set to true, the action will create one file per filter containing that filter's list of matching files.
# The file will be written in the format specified by the `list-files` option and named
# after the filter. The path to the file will be relative to the working directory and
# exported as an output variable named `<filter-name>_files_path`.
write-to-files: ''
# Relative path under $GITHUB_WORKSPACE where the repository was checked out.
working-directory: ''
@ -154,14 +161,14 @@ For more information, see [CHANGELOG](https://github.com/dorny/paths-filter/blob
# Default: ${{ github.token }}
token: ''
# Optional parameter to override the default behavior of the file matching algorithm.
# Optional parameter to override the default behavior of the file matching algorithm.
# By default files that match at least one pattern defined by the filters will be included.
# This parameter allows you to override the "at least one pattern" behavior to make it so that
# all of the patterns have to match or otherwise the file is excluded.
# An example scenario where this is useful is when you would like to match all
# .ts files in a sub-directory but not .md files.
# The filters below will match markdown files despite the exclusion syntax UNLESS
# you specify 'every' as the predicate-quantifier parameter. When you do that,
# all of the patterns have to match or otherwise the file is excluded.
# An example scenario where this is useful is when you would like to match all
# .ts files in a sub-directory but not .md files.
# The filters below will match markdown files despite the exclusion syntax UNLESS
# you specify 'every' as the predicate-quantifier parameter. When you do that,
# it will only match the .ts files in the subdirectory as expected.
#
# backend:
@ -505,6 +512,37 @@ jobs:
</details>
<details>
<summary>Handle large change sets (2000+ files)</summary>
```yaml
- uses: dorny/paths-filter@v3
id: changed
with:
# Enable writing the files matching each filter to the disk in addition to the output '<filter_name>_files'.
# The path for each filter's file is output in the format '<filter_name>_files_path'.
write-to-files: true
list-files: json
filters: |
content:
- 'content/**'
- name: List changed directories relative to the base directory
shell: bash
env:
BASE_DIR: ${{ inputs.base-directory }}
CHANGED_CONTENT_FILES_PATH: ${{ steps.changed.outputs.content_files_path }}
run: |
CHANGED_CONTENT_DIRECTORIES=$(xargs -n1 realpath -m --relative-to="${BASE_DIR}" < "${CHANGED_CONTENT_FILES_PATH}" | cut -f1 -d / | sort -u)
for d in $CHANGED_CONTENT_DIRECTORIES
do
echo "Content directory change detected: ${d}"
done
```
</details>
### Custom processing of changed files
<details>

View File

@ -36,6 +36,13 @@ inputs:
Backslash escapes every potentially unsafe character.
required: false
default: none
write-to-files:
description: |
Enables writing the lists of matching files to a corresponding file in addition to the output '<filter-name>_files'.
If set to true, the action will create one file per filter containing that filter's list of matching files.
The file will be written in the format specified by the `list-files` option and named
after the filter. The path to the file will be output as a variable named `<filter-name>_files_path`.
required: false
initial-fetch-depth:
description: |
How many commits are initially fetched from base branch.

26
dist/index.js vendored
View File

@ -552,15 +552,20 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const fs = __importStar(__nccwpck_require__(7147));
const core = __importStar(__nccwpck_require__(2186));
const github = __importStar(__nccwpck_require__(5438));
const path_1 = __importDefault(__nccwpck_require__(1017));
const filter_1 = __nccwpck_require__(3707);
const file_1 = __nccwpck_require__(4014);
const git = __importStar(__nccwpck_require__(3374));
const shell_escape_1 = __nccwpck_require__(4613);
const csv_escape_1 = __nccwpck_require__(7402);
const fs_1 = __nccwpck_require__(7147);
async function run() {
try {
const workingDirectory = core.getInput('working-directory', { required: false });
@ -573,6 +578,7 @@ async function run() {
const filtersInput = core.getInput('filters', { required: true });
const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput;
const listFiles = core.getInput('list-files', { required: false }).toLowerCase() || 'none';
const writeToFiles = core.getInput('write-to-files', { required: false }).toLowerCase() === 'true';
const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth', { required: false })) || 10;
const predicateQuantifier = core.getInput('predicate-quantifier', { required: false }) || filter_1.PredicateQuantifier.SOME;
if (!isExportFormat(listFiles)) {
@ -589,7 +595,7 @@ async function run() {
const files = await getChangedFiles(token, base, ref, initialFetchDepth);
core.info(`Detected ${files.length} changed files`);
const results = filter.match(files);
exportResults(results, listFiles);
exportResults(results, listFiles, writeToFiles);
}
catch (error) {
core.setFailed(getErrorMessage(error));
@ -742,13 +748,14 @@ async function getChangedFilesFromApi(token, pullRequest) {
core.endGroup();
}
}
function exportResults(results, format) {
function exportResults(results, format, writeToFiles) {
const tempDir = (0, fs_1.mkdtempSync)(path_1.default.join(process.cwd(), 'paths-filter-'));
core.info('Results:');
const changes = [];
for (const [key, files] of Object.entries(results)) {
const value = files.length > 0;
core.startGroup(`Filter ${key} = ${value}`);
if (files.length > 0) {
const match = files.length > 0;
core.startGroup(`Filter ${key} = ${match}`);
if (match) {
changes.push(key);
core.info('Matching files:');
for (const file of files) {
@ -758,11 +765,18 @@ function exportResults(results, format) {
else {
core.info('Matching files: none');
}
core.setOutput(key, value);
core.setOutput(key, match);
core.setOutput(`${key}_count`, files.length);
if (format !== 'none') {
const filesValue = serializeExport(files, format);
core.setOutput(`${key}_files`, filesValue);
if (writeToFiles) {
const ext = format === 'json' ? 'json' : 'txt';
const filePath = path_1.default.join(tempDir, `${key}-files.${ext}`);
fs.writeFileSync(filePath, filesValue);
core.info(`Matching files list for filter '${key}' written to '${filePath}'`);
core.setOutput(`${key}_files_path`, filePath);
}
}
core.endGroup();
}

View File

@ -1,6 +1,7 @@
import * as fs from 'fs'
import * as core from '@actions/core'
import * as github from '@actions/github'
import path from 'path'
import {GetResponseDataTypeFromEndpointMethod} from '@octokit/types'
import {PushEvent, PullRequestEvent} from '@octokit/webhooks-types'
@ -16,6 +17,7 @@ import {File, ChangeStatus} from './file'
import * as git from './git'
import {backslashEscape, shellEscape} from './list-format/shell-escape'
import {csvEscape} from './list-format/csv-escape'
import {mkdtempSync} from 'fs'
type ExportFormat = 'none' | 'csv' | 'json' | 'shell' | 'escape'
@ -32,6 +34,7 @@ async function run(): Promise<void> {
const filtersInput = core.getInput('filters', {required: true})
const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput
const listFiles = core.getInput('list-files', {required: false}).toLowerCase() || 'none'
const writeToFiles = core.getInput('write-to-files', {required: false}).toLowerCase() === 'true'
const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth', {required: false})) || 10
const predicateQuantifier = core.getInput('predicate-quantifier', {required: false}) || PredicateQuantifier.SOME
@ -52,7 +55,7 @@ async function run(): Promise<void> {
const files = await getChangedFiles(token, base, ref, initialFetchDepth)
core.info(`Detected ${files.length} changed files`)
const results = filter.match(files)
exportResults(results, listFiles)
exportResults(results, listFiles, writeToFiles)
} catch (error) {
core.setFailed(getErrorMessage(error))
}
@ -228,13 +231,15 @@ async function getChangedFilesFromApi(token: string, pullRequest: PullRequestEve
}
}
function exportResults(results: FilterResults, format: ExportFormat): void {
function exportResults(results: FilterResults, format: ExportFormat, writeToFiles: boolean): void {
const tempDir = mkdtempSync(path.join(process.cwd(), 'paths-filter-'))
core.info('Results:')
const changes = []
for (const [key, files] of Object.entries(results)) {
const value = files.length > 0
core.startGroup(`Filter ${key} = ${value}`)
if (files.length > 0) {
const match = files.length > 0
core.startGroup(`Filter ${key} = ${match}`)
if (match) {
changes.push(key)
core.info('Matching files:')
for (const file of files) {
@ -244,12 +249,21 @@ function exportResults(results: FilterResults, format: ExportFormat): void {
core.info('Matching files: none')
}
core.setOutput(key, value)
core.setOutput(key, match)
core.setOutput(`${key}_count`, files.length)
if (format !== 'none') {
const filesValue = serializeExport(files, format)
core.setOutput(`${key}_files`, filesValue)
if (writeToFiles) {
const ext = format === 'json' ? 'json' : 'txt'
const filePath = path.join(tempDir, `${key}-files.${ext}`)
fs.writeFileSync(filePath, filesValue)
core.info(`Matching files list for filter '${key}' written to '${filePath}'`)
core.setOutput(`${key}_files_path`, filePath)
}
}
core.endGroup()
}