From 8ec3b44912ed3e75618f85747d6fa91a271ffc33 Mon Sep 17 00:00:00 2001 From: Diogo Kiss Date: Mon, 25 Aug 2025 18:39:18 +0200 Subject: [PATCH] feat: add write-to-files feature to deal with large change sets This commit introduces a new input named 'write-to-files'. It enables writing the lists of matching files to a corresponding file in addition to the output '_files'. If set, the action will create the specified file with the list of matching files. The file will be written in the format specified by the `list-files` option and named after the filter. The path to the file will be output as a variable named `_files_path`. --- README.md | 50 ++++++++++++++++++++++++++++++++++++++++++++------ action.yml | 7 +++++++ dist/index.js | 26 ++++++++++++++++++++------ src/main.ts | 26 ++++++++++++++++++++------ 4 files changed, 91 insertions(+), 18 deletions(-) diff --git a/README.md b/README.md index b5e0f4c..00d536f 100644 --- a/README.md +++ b/README.md @@ -142,6 +142,13 @@ For more information, see [CHANGELOG](https://github.com/dorny/paths-filter/blob # Default: none list-files: '' + # Enables writing the lists of matching files to a corresponding file. + # If set, the action will create the specified file with the list of matching files. + # The file will be written in the format specified by the `list-files` option and named + # after the filter. The path to the file will be relative to the working directory and + # exported as an output variable named `_files_path`. + write-to-files: '' + # Relative path under $GITHUB_WORKSPACE where the repository was checked out. working-directory: '' @@ -154,14 +161,14 @@ For more information, see [CHANGELOG](https://github.com/dorny/paths-filter/blob # Default: ${{ github.token }} token: '' - # Optional parameter to override the default behavior of file matching algorithm. + # Optional parameter to override the default behavior of file matching algorithm. 
# By default files that match at least one pattern defined by the filters will be included. # This parameter allows to override the "at least one pattern" behavior to make it so that - # all of the patterns have to match or otherwise the file is excluded. - # An example scenario where this is useful if you would like to match all - # .ts files in a sub-directory but not .md files. - # The filters below will match markdown files despite the exclusion syntax UNLESS - # you specify 'every' as the predicate-quantifier parameter. When you do that, + # all of the patterns have to match or otherwise the file is excluded. + # An example scenario where this is useful if you would like to match all + # .ts files in a sub-directory but not .md files. + # The filters below will match markdown files despite the exclusion syntax UNLESS + # you specify 'every' as the predicate-quantifier parameter. When you do that, # it will only match the .ts files in the subdirectory as expected. # # backend: @@ -505,6 +512,37 @@ jobs: +
+<details>
+<summary>Handle large change sets (2000+ files)</summary>
+
+```yaml
+- uses: dorny/paths-filter@v3
+  id: changed
+  with:
+    # Enable writing the files matching each filter to the disk in addition to the output '<filter_name>_files'.
+    # The path for each filter's file is output in the format '<filter_name>_files_path'.
+    write-to-files: true
+    list-files: json
+    filters: |
+      content:
+        - 'content/**'
+
+
+- name: List changed directories relative to the base directory
+  shell: bash
+  env:
+    BASE_DIR: ${{ inputs.base-directory }}
+    CHANGED_CONTENT_FILES_PATH: ${{ steps.changed.outputs.content_files_path }}
+  run: |
+    CHANGED_CONTENT_DIRECTORIES=$(cat "${CHANGED_CONTENT_FILES_PATH}" | xargs -n1 realpath -m --relative-to=${BASE_DIR} | cut -f1 -d / | sort -u)
+    for d in $CHANGED_CONTENT_DIRECTORIES
+    do
+      echo "Content directory change detected: ${d}"
+    done
+```
+
+</details>
+ ### Custom processing of changed files
diff --git a/action.yml b/action.yml index e7d24f5..9b0dc18 100644 --- a/action.yml +++ b/action.yml @@ -36,6 +36,13 @@ inputs: Backslash escapes every potentially unsafe character. required: false default: none + write-to-files: + description: | + Enables writing the lists of matching files to a corresponding file in addition to the output '_files'. + If set, the action will create the specified file with the list of matching files. + The file will be written in the format specified by the `list-files` option and named + after the filter. The path to the file will be output as a variable named `_files_path`. + required: false initial-fetch-depth: description: | How many commits are initially fetched from base branch. diff --git a/dist/index.js b/dist/index.js index cc7d7d4..3187bac 100644 --- a/dist/index.js +++ b/dist/index.js @@ -552,15 +552,20 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); const fs = __importStar(__nccwpck_require__(7147)); const core = __importStar(__nccwpck_require__(2186)); const github = __importStar(__nccwpck_require__(5438)); +const path_1 = __importDefault(__nccwpck_require__(1017)); const filter_1 = __nccwpck_require__(3707); const file_1 = __nccwpck_require__(4014); const git = __importStar(__nccwpck_require__(3374)); const shell_escape_1 = __nccwpck_require__(4613); const csv_escape_1 = __nccwpck_require__(7402); +const fs_1 = __nccwpck_require__(7147); async function run() { try { const workingDirectory = core.getInput('working-directory', { required: false }); @@ -573,6 +578,7 @@ async function run() { const filtersInput = core.getInput('filters', { required: true }); const filtersYaml = isPathInput(filtersInput) ? 
getConfigFileContent(filtersInput) : filtersInput; const listFiles = core.getInput('list-files', { required: false }).toLowerCase() || 'none'; + const writeToFiles = core.getInput('write-to-files', { required: false }).toLowerCase() === 'true'; const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth', { required: false })) || 10; const predicateQuantifier = core.getInput('predicate-quantifier', { required: false }) || filter_1.PredicateQuantifier.SOME; if (!isExportFormat(listFiles)) { @@ -589,7 +595,7 @@ async function run() { const files = await getChangedFiles(token, base, ref, initialFetchDepth); core.info(`Detected ${files.length} changed files`); const results = filter.match(files); - exportResults(results, listFiles); + exportResults(results, listFiles, writeToFiles); } catch (error) { core.setFailed(getErrorMessage(error)); @@ -742,13 +748,14 @@ async function getChangedFilesFromApi(token, pullRequest) { core.endGroup(); } } -function exportResults(results, format) { +function exportResults(results, format, writeToFiles) { + const tempDir = (0, fs_1.mkdtempSync)(path_1.default.join(process.cwd(), 'paths-filter-')); core.info('Results:'); const changes = []; for (const [key, files] of Object.entries(results)) { - const value = files.length > 0; - core.startGroup(`Filter ${key} = ${value}`); - if (files.length > 0) { + const match = files.length > 0; + core.startGroup(`Filter ${key} = ${match}`); + if (match) { changes.push(key); core.info('Matching files:'); for (const file of files) { @@ -758,11 +765,18 @@ function exportResults(results, format) { else { core.info('Matching files: none'); } - core.setOutput(key, value); + core.setOutput(key, match); core.setOutput(`${key}_count`, files.length); if (format !== 'none') { const filesValue = serializeExport(files, format); core.setOutput(`${key}_files`, filesValue); + if (writeToFiles) { + const ext = format === 'json' ? 
'json' : 'txt'; + const filePath = path_1.default.join(tempDir, `${key}-files.${ext}`); + fs.writeFileSync(filePath, filesValue); + core.info(`Matching files list for filter '${key}' written to '${filePath}'`); + core.setOutput(`${key}_files_path`, filePath); + } } core.endGroup(); } diff --git a/src/main.ts b/src/main.ts index 8320287..76bccac 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,6 +1,7 @@ import * as fs from 'fs' import * as core from '@actions/core' import * as github from '@actions/github' +import path from 'path' import {GetResponseDataTypeFromEndpointMethod} from '@octokit/types' import {PushEvent, PullRequestEvent} from '@octokit/webhooks-types' @@ -16,6 +17,7 @@ import {File, ChangeStatus} from './file' import * as git from './git' import {backslashEscape, shellEscape} from './list-format/shell-escape' import {csvEscape} from './list-format/csv-escape' +import {mkdtempSync} from 'fs' type ExportFormat = 'none' | 'csv' | 'json' | 'shell' | 'escape' @@ -32,6 +34,7 @@ async function run(): Promise { const filtersInput = core.getInput('filters', {required: true}) const filtersYaml = isPathInput(filtersInput) ? 
getConfigFileContent(filtersInput) : filtersInput const listFiles = core.getInput('list-files', {required: false}).toLowerCase() || 'none' + const writeToFiles = core.getInput('write-to-files', {required: false}).toLowerCase() === 'true' const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth', {required: false})) || 10 const predicateQuantifier = core.getInput('predicate-quantifier', {required: false}) || PredicateQuantifier.SOME @@ -52,7 +55,7 @@ async function run(): Promise { const files = await getChangedFiles(token, base, ref, initialFetchDepth) core.info(`Detected ${files.length} changed files`) const results = filter.match(files) - exportResults(results, listFiles) + exportResults(results, listFiles, writeToFiles) } catch (error) { core.setFailed(getErrorMessage(error)) } @@ -228,13 +231,15 @@ async function getChangedFilesFromApi(token: string, pullRequest: PullRequestEve } } -function exportResults(results: FilterResults, format: ExportFormat): void { +function exportResults(results: FilterResults, format: ExportFormat, writeToFiles: boolean): void { + const tempDir = mkdtempSync(path.join(process.cwd(), 'paths-filter-')) + core.info('Results:') const changes = [] for (const [key, files] of Object.entries(results)) { - const value = files.length > 0 - core.startGroup(`Filter ${key} = ${value}`) - if (files.length > 0) { + const match = files.length > 0 + core.startGroup(`Filter ${key} = ${match}`) + if (match) { changes.push(key) core.info('Matching files:') for (const file of files) { @@ -244,12 +249,21 @@ function exportResults(results: FilterResults, format: ExportFormat): void { core.info('Matching files: none') } - core.setOutput(key, value) + core.setOutput(key, match) core.setOutput(`${key}_count`, files.length) if (format !== 'none') { const filesValue = serializeExport(files, format) core.setOutput(`${key}_files`, filesValue) + + if (writeToFiles) { + const ext = format === 'json' ? 
'json' : 'txt' + const filePath = path.join(tempDir, `${key}-files.${ext}`) + fs.writeFileSync(filePath, filesValue) + core.info(`Matching files list for filter '${key}' written to '${filePath}'`) + core.setOutput(`${key}_files_path`, filePath) + } } + core.endGroup() }