mirror of
https://github.com/actions/checkout.git
synced 2026-03-10 16:34:26 +00:00
Merge 4bd535976f into 0c366fd6a8
This commit is contained in:
commit
ff462458c7
@ -160,6 +160,12 @@ Please refer to the [release page](https://github.com/actions/checkout/releases/
|
||||
# running from unless specified. Example URLs are https://github.com or
|
||||
# https://my-ghes-server.example.com
|
||||
github-server-url: ''
|
||||
|
||||
# Path to a local directory used as a reference cache for Git clones. Over time,
|
||||
# this directory will contain bare clones of the checked-out repositories (and
|
||||
# their submodules). Using this significantly reduces network bandwidth and speeds
|
||||
# up clones.
|
||||
reference-cache: ''
|
||||
```
|
||||
<!-- end usage -->
|
||||
|
||||
|
||||
@ -1047,8 +1047,10 @@ async function setup(testName: string): Promise<void> {
|
||||
lfsFetch: jest.fn(),
|
||||
lfsInstall: jest.fn(),
|
||||
log1: jest.fn(),
|
||||
referenceAdd: jest.fn(),
|
||||
remoteAdd: jest.fn(),
|
||||
removeEnvironmentVariable: jest.fn((name: string) => delete git.env[name]),
|
||||
execGit: jest.fn(),
|
||||
revParse: jest.fn(),
|
||||
setEnvironmentVariable: jest.fn((name: string, value: string) => {
|
||||
git.env[name] = value
|
||||
@ -1157,6 +1159,7 @@ async function setup(testName: string): Promise<void> {
|
||||
sparseCheckout: [],
|
||||
sparseCheckoutConeMode: true,
|
||||
fetchDepth: 1,
|
||||
fetchDepthExplicit: false,
|
||||
fetchTags: false,
|
||||
showProgress: true,
|
||||
lfs: false,
|
||||
@ -1173,7 +1176,8 @@ async function setup(testName: string): Promise<void> {
|
||||
sshUser: '',
|
||||
workflowOrganizationId: 123456,
|
||||
setSafeDirectory: true,
|
||||
githubServerUrl: githubServerUrl
|
||||
githubServerUrl: githubServerUrl,
|
||||
referenceCache: ''
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
109
__test__/git-cache-helper.test.ts
Normal file
109
__test__/git-cache-helper.test.ts
Normal file
@ -0,0 +1,109 @@
|
||||
import * as path from 'path'
|
||||
import * as fs from 'fs'
|
||||
import * as io from '@actions/io'
|
||||
import { GitCacheHelper } from '../src/git-cache-helper'
|
||||
import { IGitCommandManager } from '../src/git-command-manager'
|
||||
|
||||
describe('GitCacheHelper', () => {
|
||||
let cacheHelper: GitCacheHelper
|
||||
let mockGit: jest.Mocked<IGitCommandManager>
|
||||
|
||||
const cacheDir = path.join(__dirname, 'test-cache')
|
||||
|
||||
beforeEach(async () => {
|
||||
cacheHelper = new GitCacheHelper(cacheDir)
|
||||
mockGit = {
|
||||
execGit: jest.fn().mockImplementation(async (args) => {
|
||||
// If git clone is called, simulate creating the destination dir
|
||||
if (args && args.includes('clone')) {
|
||||
const dest = args.find((a: string) => a.includes('.tmp.'));
|
||||
if (dest) {
|
||||
await io.mkdirP(dest);
|
||||
} else {
|
||||
console.log('No .tmp. found in args:', args);
|
||||
}
|
||||
}
|
||||
return { exitCode: 0, stdout: '', stderr: '' };
|
||||
}),
|
||||
gitEnv: {}
|
||||
} as any
|
||||
|
||||
await io.mkdirP(cacheDir)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await io.rmRF(cacheDir)
|
||||
})
|
||||
|
||||
it('generates a consistent, short, and safe cache directory name', () => {
|
||||
const url1 = 'https://github.com/mwyraz/forgejo-actions-checkout.git'
|
||||
const name1 = (cacheHelper as any).generateCacheDirName(url1)
|
||||
|
||||
// Check structure: safe string + hash
|
||||
expect(name1).toMatch(/^https___github_com_mwyraz_forgejo_actions_checkout_git_[0-9a-f]{8}\.git$/)
|
||||
|
||||
// Same URL should produce the same directory name
|
||||
const url1_duplicate = 'https://github.com/mwyraz/forgejo-actions-checkout.git'
|
||||
expect((cacheHelper as any).generateCacheDirName(url1_duplicate)).toBe(name1)
|
||||
|
||||
// Different URL should produce a different directory name
|
||||
const url2 = 'https://github.com/mwyraz/forgejo-actions-checkout-other.git'
|
||||
expect((cacheHelper as any).generateCacheDirName(url2)).not.toBe(name1)
|
||||
|
||||
// SSH URL
|
||||
const url3 = 'git@github.com:auth/repo.git'
|
||||
const name3 = (cacheHelper as any).generateCacheDirName(url3)
|
||||
expect(name3).toMatch(/^git_github_com_auth_repo_git_[0-9a-f]{8}\.git$/)
|
||||
|
||||
// Unclean URLs
|
||||
const url4 = 'https://github.com/foo/bar.git?v=1'
|
||||
const name4 = (cacheHelper as any).generateCacheDirName(url4)
|
||||
expect(name4).toMatch(/^https___github_com_foo_bar_git_v_1_[0-9a-f]{8}\.git$/)
|
||||
})
|
||||
|
||||
it('sets up a cache directory if it does not exist', async () => {
|
||||
const repositoryUrl = 'https://github.com/mwyraz/test-repo.git'
|
||||
const resultPath = await cacheHelper.setupCache(mockGit, repositoryUrl)
|
||||
|
||||
const expectedName = (cacheHelper as any).generateCacheDirName(repositoryUrl)
|
||||
expect(resultPath).toBe(path.join(cacheDir, expectedName))
|
||||
|
||||
// It should have executed git clone --bare
|
||||
expect(mockGit.execGit).toHaveBeenCalledWith(
|
||||
expect.arrayContaining([
|
||||
'-C',
|
||||
cacheDir,
|
||||
'clone',
|
||||
'--bare',
|
||||
repositoryUrl,
|
||||
expect.stringContaining(`${expectedName}.tmp`) // should use tmp dir
|
||||
])
|
||||
)
|
||||
})
|
||||
|
||||
it('fetches updates if the cache directory already exists', async () => {
|
||||
const repositoryUrl = 'https://github.com/mwyraz/existing-repo.git'
|
||||
const expectedName = (cacheHelper as any).generateCacheDirName(repositoryUrl)
|
||||
const fixedPath = path.join(cacheDir, expectedName)
|
||||
|
||||
// Fake existing directory
|
||||
await io.mkdirP(path.join(fixedPath, 'objects'))
|
||||
|
||||
const resultPath = await cacheHelper.setupCache(mockGit, repositoryUrl)
|
||||
expect(resultPath).toBe(fixedPath)
|
||||
|
||||
// It should have executed git fetch
|
||||
expect(mockGit.execGit).toHaveBeenCalledWith(
|
||||
expect.arrayContaining([
|
||||
'-C',
|
||||
fixedPath,
|
||||
'fetch',
|
||||
'--force',
|
||||
'--prune',
|
||||
'--tags',
|
||||
'origin',
|
||||
'+refs/heads/*:refs/heads/*'
|
||||
])
|
||||
)
|
||||
})
|
||||
})
|
||||
@ -478,8 +478,10 @@ async function setup(testName: string): Promise<void> {
|
||||
lfsFetch: jest.fn(),
|
||||
lfsInstall: jest.fn(),
|
||||
log1: jest.fn(),
|
||||
referenceAdd: jest.fn(),
|
||||
remoteAdd: jest.fn(),
|
||||
removeEnvironmentVariable: jest.fn(),
|
||||
execGit: jest.fn(),
|
||||
revParse: jest.fn(),
|
||||
setEnvironmentVariable: jest.fn(),
|
||||
shaExists: jest.fn(),
|
||||
|
||||
182
__test__/git-source-provider-reference-cache.test.ts
Normal file
182
__test__/git-source-provider-reference-cache.test.ts
Normal file
@ -0,0 +1,182 @@
|
||||
import * as path from 'path'
|
||||
|
||||
const mockStartGroup = jest.fn()
|
||||
const mockEndGroup = jest.fn()
|
||||
const mockInfo = jest.fn()
|
||||
const mockWarning = jest.fn()
|
||||
const mockSetOutput = jest.fn()
|
||||
const mockSetSecret = jest.fn()
|
||||
|
||||
const mockCreateCommandManager = jest.fn()
|
||||
const mockCreateAuthHelper = jest.fn()
|
||||
const mockPrepareExistingDirectory = jest.fn()
|
||||
const mockGetFetchUrl = jest.fn()
|
||||
const mockGetRefSpec = jest.fn()
|
||||
const mockTestRef = jest.fn()
|
||||
const mockGetCheckoutInfo = jest.fn()
|
||||
const mockCheckCommitInfo = jest.fn()
|
||||
const mockSetRepositoryPath = jest.fn()
|
||||
const mockSetupCache = jest.fn()
|
||||
const mockDirectoryExistsSync = jest.fn()
|
||||
const mockFileExistsSync = jest.fn()
|
||||
|
||||
jest.mock('@actions/core', () => ({
|
||||
startGroup: mockStartGroup,
|
||||
endGroup: mockEndGroup,
|
||||
info: mockInfo,
|
||||
warning: mockWarning,
|
||||
setOutput: mockSetOutput,
|
||||
setSecret: mockSetSecret
|
||||
}))
|
||||
|
||||
jest.mock('@actions/io', () => ({
|
||||
rmRF: jest.fn(),
|
||||
mkdirP: jest.fn()
|
||||
}))
|
||||
|
||||
jest.mock('../src/fs-helper', () => ({
|
||||
directoryExistsSync: mockDirectoryExistsSync,
|
||||
fileExistsSync: mockFileExistsSync
|
||||
}))
|
||||
|
||||
jest.mock('../src/git-command-manager', () => ({
|
||||
MinimumGitSparseCheckoutVersion: {},
|
||||
createCommandManager: mockCreateCommandManager
|
||||
}))
|
||||
|
||||
jest.mock('../src/git-auth-helper', () => ({
|
||||
createAuthHelper: mockCreateAuthHelper
|
||||
}))
|
||||
|
||||
jest.mock('../src/git-directory-helper', () => ({
|
||||
prepareExistingDirectory: mockPrepareExistingDirectory
|
||||
}))
|
||||
|
||||
jest.mock('../src/github-api-helper', () => ({
|
||||
downloadRepository: jest.fn(),
|
||||
getDefaultBranch: jest.fn()
|
||||
}))
|
||||
|
||||
jest.mock('../src/ref-helper', () => ({
|
||||
getRefSpec: mockGetRefSpec,
|
||||
getCheckoutInfo: mockGetCheckoutInfo,
|
||||
testRef: mockTestRef,
|
||||
checkCommitInfo: mockCheckCommitInfo
|
||||
}))
|
||||
|
||||
jest.mock('../src/state-helper', () => ({
|
||||
setRepositoryPath: mockSetRepositoryPath
|
||||
}))
|
||||
|
||||
jest.mock('../src/url-helper', () => ({
|
||||
getFetchUrl: mockGetFetchUrl
|
||||
}))
|
||||
|
||||
jest.mock('../src/git-cache-helper', () => ({
|
||||
GitCacheHelper: jest.fn().mockImplementation(() => ({
|
||||
setupCache: mockSetupCache
|
||||
}))
|
||||
}))
|
||||
|
||||
import {getSource} from '../src/git-source-provider'
|
||||
|
||||
describe('getSource reference cache regression', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it('updates the reference cache and reconfigures alternates for existing repositories', async () => {
|
||||
const repositoryPath = '/tmp/work/repo'
|
||||
const repositoryUrl = 'https://github.com/actions/checkout'
|
||||
const cachePath = '/tmp/reference-cache/actions-checkout.git'
|
||||
|
||||
const mockGit = {
|
||||
init: jest.fn(),
|
||||
remoteAdd: jest.fn(),
|
||||
referenceAdd: jest.fn().mockResolvedValue(undefined),
|
||||
tryDisableAutomaticGarbageCollection: jest.fn().mockResolvedValue(true),
|
||||
fetch: jest.fn().mockResolvedValue(undefined),
|
||||
version: jest.fn().mockResolvedValue({
|
||||
checkMinimum: jest.fn().mockReturnValue(true)
|
||||
}),
|
||||
disableSparseCheckout: jest.fn().mockResolvedValue(undefined),
|
||||
checkout: jest.fn().mockResolvedValue(undefined),
|
||||
log1: jest
|
||||
.fn()
|
||||
.mockResolvedValueOnce('commit info')
|
||||
.mockResolvedValueOnce('0123456789abcdef'),
|
||||
lfsInstall: jest.fn(),
|
||||
submoduleSync: jest.fn(),
|
||||
submoduleUpdate: jest.fn(),
|
||||
submoduleForeach: jest.fn(),
|
||||
config: jest.fn()
|
||||
}
|
||||
|
||||
const mockAuthHelper = {
|
||||
configureAuth: jest.fn().mockResolvedValue(undefined),
|
||||
configureGlobalAuth: jest.fn().mockResolvedValue(undefined),
|
||||
configureSubmoduleAuth: jest.fn().mockResolvedValue(undefined),
|
||||
configureTempGlobalConfig: jest.fn().mockResolvedValue('/tmp/gitconfig'),
|
||||
removeAuth: jest.fn().mockResolvedValue(undefined),
|
||||
removeGlobalAuth: jest.fn().mockResolvedValue(undefined),
|
||||
removeGlobalConfig: jest.fn().mockResolvedValue(undefined)
|
||||
}
|
||||
|
||||
mockCreateCommandManager.mockResolvedValue(mockGit)
|
||||
mockCreateAuthHelper.mockReturnValue(mockAuthHelper)
|
||||
mockPrepareExistingDirectory.mockResolvedValue(undefined)
|
||||
mockGetFetchUrl.mockReturnValue(repositoryUrl)
|
||||
mockGetRefSpec.mockReturnValue(['+refs/heads/main:refs/remotes/origin/main'])
|
||||
mockTestRef.mockResolvedValue(true)
|
||||
mockGetCheckoutInfo.mockResolvedValue({
|
||||
ref: 'refs/heads/main',
|
||||
startPoint: 'refs/remotes/origin/main'
|
||||
})
|
||||
mockCheckCommitInfo.mockResolvedValue(undefined)
|
||||
mockSetupCache.mockResolvedValue(cachePath)
|
||||
mockFileExistsSync.mockReturnValue(false)
|
||||
mockDirectoryExistsSync.mockImplementation((targetPath: string) => {
|
||||
return (
|
||||
targetPath === repositoryPath ||
|
||||
targetPath === path.join(repositoryPath, '.git') ||
|
||||
targetPath === path.join(cachePath, 'objects')
|
||||
)
|
||||
})
|
||||
|
||||
await getSource({
|
||||
repositoryPath,
|
||||
repositoryOwner: 'actions',
|
||||
repositoryName: 'checkout',
|
||||
ref: 'refs/heads/main',
|
||||
commit: '0123456789abcdef',
|
||||
clean: false,
|
||||
filter: undefined,
|
||||
sparseCheckout: undefined as any,
|
||||
sparseCheckoutConeMode: false,
|
||||
fetchDepth: 1,
|
||||
fetchDepthExplicit: true,
|
||||
fetchTags: false,
|
||||
showProgress: false,
|
||||
referenceCache: '/tmp/reference-cache',
|
||||
lfs: false,
|
||||
submodules: false,
|
||||
nestedSubmodules: false,
|
||||
authToken: 'token',
|
||||
sshKey: '',
|
||||
sshKnownHosts: '',
|
||||
sshStrict: true,
|
||||
sshUser: 'git',
|
||||
persistCredentials: false,
|
||||
workflowOrganizationId: undefined,
|
||||
githubServerUrl: 'https://github.com',
|
||||
setSafeDirectory: false
|
||||
} as any)
|
||||
|
||||
expect(mockGit.init).not.toHaveBeenCalled()
|
||||
expect(mockGit.remoteAdd).not.toHaveBeenCalled()
|
||||
expect(mockSetupCache).toHaveBeenCalledWith(mockGit, repositoryUrl)
|
||||
expect(mockGit.referenceAdd).toHaveBeenCalledWith(
|
||||
path.join(cachePath, 'objects')
|
||||
)
|
||||
})
|
||||
})
|
||||
163
__test__/git-source-provider.test.ts
Normal file
163
__test__/git-source-provider.test.ts
Normal file
@ -0,0 +1,163 @@
|
||||
import * as core from '@actions/core'
|
||||
import * as fsHelper from '../src/fs-helper'
|
||||
import {GitCacheHelper} from '../src/git-cache-helper'
|
||||
import {
|
||||
adjustFetchDepthForCache,
|
||||
setupReferenceCache
|
||||
} from '../src/git-source-provider'
|
||||
|
||||
// Mock @actions/core
|
||||
jest.mock('@actions/core')
|
||||
|
||||
describe('adjustFetchDepthForCache', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it('does nothing when referenceCache is not set', () => {
|
||||
const settings = {
|
||||
referenceCache: '',
|
||||
fetchDepth: 1,
|
||||
fetchDepthExplicit: false
|
||||
}
|
||||
adjustFetchDepthForCache(settings)
|
||||
expect(settings.fetchDepth).toBe(1)
|
||||
expect(core.warning).not.toHaveBeenCalled()
|
||||
expect(core.info).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('overrides fetchDepth to 0 when referenceCache is set and fetchDepth is default', () => {
|
||||
const settings = {
|
||||
referenceCache: '/cache/git-reference-cache',
|
||||
fetchDepth: 1,
|
||||
fetchDepthExplicit: false
|
||||
}
|
||||
adjustFetchDepthForCache(settings)
|
||||
expect(settings.fetchDepth).toBe(0)
|
||||
expect(core.info).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Overriding fetch-depth from 1 to 0')
|
||||
)
|
||||
expect(core.warning).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('warns but keeps fetchDepth when referenceCache is set and fetchDepth is explicit', () => {
|
||||
const settings = {
|
||||
referenceCache: '/cache/git-reference-cache',
|
||||
fetchDepth: 1,
|
||||
fetchDepthExplicit: true
|
||||
}
|
||||
adjustFetchDepthForCache(settings)
|
||||
expect(settings.fetchDepth).toBe(1)
|
||||
expect(core.warning).toHaveBeenCalledWith(
|
||||
expect.stringContaining("'fetch-depth: 1' is set with reference-cache enabled")
|
||||
)
|
||||
expect(core.info).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does nothing when referenceCache is set and fetchDepth is already 0 (explicit)', () => {
|
||||
const settings = {
|
||||
referenceCache: '/cache/git-reference-cache',
|
||||
fetchDepth: 0,
|
||||
fetchDepthExplicit: true
|
||||
}
|
||||
adjustFetchDepthForCache(settings)
|
||||
expect(settings.fetchDepth).toBe(0)
|
||||
expect(core.warning).not.toHaveBeenCalled()
|
||||
expect(core.info).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does nothing when referenceCache is set and fetchDepth is already 0 (default)', () => {
|
||||
const settings = {
|
||||
referenceCache: '/cache/git-reference-cache',
|
||||
fetchDepth: 0,
|
||||
fetchDepthExplicit: false
|
||||
}
|
||||
adjustFetchDepthForCache(settings)
|
||||
expect(settings.fetchDepth).toBe(0)
|
||||
expect(core.warning).not.toHaveBeenCalled()
|
||||
expect(core.info).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('warns with correct depth value when explicit fetchDepth is > 1', () => {
|
||||
const settings = {
|
||||
referenceCache: '/cache/git-reference-cache',
|
||||
fetchDepth: 42,
|
||||
fetchDepthExplicit: true
|
||||
}
|
||||
adjustFetchDepthForCache(settings)
|
||||
expect(settings.fetchDepth).toBe(42)
|
||||
expect(core.warning).toHaveBeenCalledWith(
|
||||
expect.stringContaining("'fetch-depth: 42' is set with reference-cache enabled")
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('setupReferenceCache', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
jest.restoreAllMocks()
|
||||
})
|
||||
|
||||
it('does nothing when referenceCache is not set', async () => {
|
||||
const git = {
|
||||
referenceAdd: jest.fn()
|
||||
} as any
|
||||
|
||||
await setupReferenceCache(git, '', 'https://github.com/actions/checkout.git')
|
||||
|
||||
expect(git.referenceAdd).not.toHaveBeenCalled()
|
||||
expect(core.startGroup).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('updates the cache and configures alternates when cache objects exist', async () => {
|
||||
const git = {
|
||||
referenceAdd: jest.fn().mockResolvedValue(undefined)
|
||||
} as any
|
||||
const setupCacheSpy = jest
|
||||
.spyOn(GitCacheHelper.prototype, 'setupCache')
|
||||
.mockResolvedValue('/tmp/reference-cache/repo.git')
|
||||
jest
|
||||
.spyOn(fsHelper, 'directoryExistsSync')
|
||||
.mockReturnValue(true)
|
||||
|
||||
await setupReferenceCache(
|
||||
git,
|
||||
'/tmp/reference-cache',
|
||||
'https://github.com/actions/checkout.git'
|
||||
)
|
||||
|
||||
expect(setupCacheSpy).toHaveBeenCalledWith(
|
||||
git,
|
||||
'https://github.com/actions/checkout.git'
|
||||
)
|
||||
expect(git.referenceAdd).toHaveBeenCalledWith(
|
||||
'/tmp/reference-cache/repo.git/objects'
|
||||
)
|
||||
})
|
||||
|
||||
it('warns when the cache objects directory is missing', async () => {
|
||||
const git = {
|
||||
referenceAdd: jest.fn().mockResolvedValue(undefined)
|
||||
} as any
|
||||
jest
|
||||
.spyOn(GitCacheHelper.prototype, 'setupCache')
|
||||
.mockResolvedValue('/tmp/reference-cache/repo.git')
|
||||
jest
|
||||
.spyOn(fsHelper, 'directoryExistsSync')
|
||||
.mockReturnValue(false)
|
||||
|
||||
await setupReferenceCache(
|
||||
git,
|
||||
'/tmp/reference-cache',
|
||||
'https://github.com/actions/checkout.git'
|
||||
)
|
||||
|
||||
expect(git.referenceAdd).not.toHaveBeenCalled()
|
||||
expect(core.warning).toHaveBeenCalledWith(
|
||||
'Reference repository cache objects directory /tmp/reference-cache/repo.git/objects does not exist'
|
||||
)
|
||||
})
|
||||
})
|
||||
@ -91,6 +91,7 @@ describe('input-helper tests', () => {
|
||||
expect(settings.repositoryOwner).toBe('some-owner')
|
||||
expect(settings.repositoryPath).toBe(gitHubWorkspace)
|
||||
expect(settings.setSafeDirectory).toBe(true)
|
||||
expect(settings.referenceCache || '').toBe('')
|
||||
})
|
||||
|
||||
it('qualifies ref', async () => {
|
||||
|
||||
@ -98,6 +98,12 @@ inputs:
|
||||
github-server-url:
|
||||
description: The base URL for the GitHub instance that you are trying to clone from, will use environment defaults to fetch from the same instance that the workflow is running from unless specified. Example URLs are https://github.com or https://my-ghes-server.example.com
|
||||
required: false
|
||||
reference-cache:
|
||||
description: >
|
||||
Path to a local directory used as a reference cache for Git clones. Over time,
|
||||
this directory will contain bare clones of the checked-out repositories (and their submodules).
|
||||
Using this significantly reduces network bandwidth and speeds up clones.
|
||||
required: false
|
||||
outputs:
|
||||
ref:
|
||||
description: 'The branch, tag or SHA that was checked out'
|
||||
|
||||
127
adrs/2303-reference-cache.md
Normal file
127
adrs/2303-reference-cache.md
Normal file
@ -0,0 +1,127 @@
|
||||
# ADR 2303: Reference cache for faster checkouts
|
||||
|
||||
**Date**: 2026-03-10
|
||||
|
||||
**Status**: Proposed
|
||||
|
||||
## Context
|
||||
|
||||
Repeated checkouts of the same repositories are expensive on runners with persistent storage.
|
||||
This is especially noticeable for self-hosted runners and custom runner images that execute
|
||||
many jobs against the same repositories and submodules.
|
||||
|
||||
Today, each checkout fetches objects from the remote even when the runner already has most of
|
||||
the repository history available locally from previous jobs. This increases network traffic,
|
||||
slows down checkout time, and makes recursive submodule initialization more expensive than
|
||||
necessary.
|
||||
|
||||
Git supports reference repositories and alternates, which allow one working repository to reuse
|
||||
objects from another local repository. This mechanism is a good fit for persistent runners,
|
||||
provided the cache is managed safely and works for both the main repository and submodules.
|
||||
|
||||
## Decision
|
||||
|
||||
Add an optional `reference-cache` input that points to a local directory used to store managed
|
||||
bare repositories for the primary repository and its submodules.
|
||||
|
||||
### Input
|
||||
|
||||
Add a new input in `action.yml`:
|
||||
|
||||
```yaml
|
||||
reference-cache:
|
||||
description: >
|
||||
Path to a local directory used as a reference cache for Git clones.
|
||||
```
|
||||
|
||||
The value is exposed through `settings.referenceCache`.
|
||||
|
||||
### Cache layout
|
||||
|
||||
Each cached repository is stored as a bare repository inside the configured cache directory.
|
||||
|
||||
The cache directory name is derived from the repository URL by:
|
||||
|
||||
- replacing non-alphanumeric characters with `_`
|
||||
- appending a short SHA-256 hash of the original URL to avoid collisions
|
||||
|
||||
Example:
|
||||
|
||||
```text
|
||||
<reference-cache>/https___github_com_actions_checkout_8f9b1c2a.git
|
||||
```
|
||||
|
||||
### Cache lifecycle
|
||||
|
||||
Introduce helper logic in `src/git-cache-helper.ts` responsible for:
|
||||
|
||||
- creating a bare cache repository with `git clone --bare`
|
||||
- updating an existing bare cache repository with `git fetch --force`
|
||||
- serializing access with file-based locking so concurrent jobs do not corrupt the cache
|
||||
- using a temporary clone-and-rename flow to avoid leaving behind partial repositories
|
||||
|
||||
### Main repository checkout
|
||||
|
||||
When `reference-cache` is configured:
|
||||
|
||||
- prepare or update the cache for the main repository URL
|
||||
- configure the checkout repository to use the cache through Git alternates
|
||||
- keep the working repository attached to the cache instead of dissociating it
|
||||
|
||||
This allows later fetch operations to reuse local objects instead of downloading them again.
|
||||
|
||||
### Submodules
|
||||
|
||||
When submodules are enabled together with `reference-cache`, submodules are processed one by one
|
||||
instead of relying solely on a monolithic `git submodule update --recursive` flow.
|
||||
|
||||
For each submodule:
|
||||
|
||||
- read the submodule URL from `.gitmodules`
|
||||
- resolve relative URLs where possible
|
||||
- create or update a dedicated cache for that submodule repository
|
||||
- run `git submodule update --init --reference <cache> <path>` for that submodule
|
||||
|
||||
When recursive submodules are requested, repeat the same process inside each initialized submodule.
|
||||
|
||||
### Fetch depth behavior
|
||||
|
||||
When `reference-cache` is enabled, shallow fetches are usually counterproductive because object
|
||||
negotiation overhead can outweigh the benefit of a local object store.
|
||||
|
||||
For that reason:
|
||||
|
||||
- the default `fetch-depth` is overridden to `0` when `reference-cache` is enabled
|
||||
- if the user explicitly sets `fetch-depth`, keep the user-provided value and emit a warning
|
||||
|
||||
### No `--dissociate`
|
||||
|
||||
The checkout should remain connected to the reference cache.
|
||||
|
||||
Using `--dissociate` would copy objects into the working repository and typically require extra
|
||||
repacking work, which reduces the performance benefit of the cache. If the cache is removed, the
|
||||
workspace is expected to be recreated, which is acceptable for the target runner scenarios.
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- reduces network traffic for repeated checkouts on persistent runners
|
||||
- improves checkout performance for the main repository and submodules
|
||||
- reuses standard Git mechanisms instead of introducing a custom object store
|
||||
- keeps cache naming deterministic and readable for administrators
|
||||
|
||||
### Trade-offs
|
||||
|
||||
- adds cache management complexity, including locking and recovery from interrupted operations
|
||||
- submodule handling becomes more complex because each submodule may require its own cache
|
||||
- benefits are limited on ephemeral runners, where the cache is not reused across jobs
|
||||
- workspaces remain dependent on the presence of the cache until they are recreated
|
||||
|
||||
## Acceptance criteria
|
||||
|
||||
1. The `reference-cache` input can be configured and is exposed through the action settings.
|
||||
2. Cache directories for the main repository and submodules follow the sanitized-URL-plus-hash naming scheme.
|
||||
3. The main checkout uses Git alternates so later fetches can reuse local cached objects.
|
||||
4. Submodules, including recursive submodules, can use repository-specific caches.
|
||||
5. The checkout does not use `--dissociate` and remains attached to the cache for performance.
|
||||
2472
dist/index.js
vendored
2472
dist/index.js
vendored
File diff suppressed because it is too large
Load Diff
41
package-lock.json
generated
41
package-lock.json
generated
@ -19,6 +19,7 @@
|
||||
"devDependencies": {
|
||||
"@types/jest": "^29.5.12",
|
||||
"@types/node": "^24.1.0",
|
||||
"@types/proper-lockfile": "^4.1.4",
|
||||
"@types/uuid": "^9.0.8",
|
||||
"@typescript-eslint/eslint-plugin": "^7.9.0",
|
||||
"@typescript-eslint/parser": "^7.9.0",
|
||||
@ -30,6 +31,7 @@
|
||||
"jest-circus": "^29.7.0",
|
||||
"js-yaml": "^4.1.0",
|
||||
"prettier": "^3.3.3",
|
||||
"proper-lockfile": "^4.1.2",
|
||||
"ts-jest": "^29.2.5",
|
||||
"typescript": "^5.5.4"
|
||||
}
|
||||
@ -1523,6 +1525,23 @@
|
||||
"undici-types": "~7.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/proper-lockfile": {
|
||||
"version": "4.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/proper-lockfile/-/proper-lockfile-4.1.4.tgz",
|
||||
"integrity": "sha512-uo2ABllncSqg9F1D4nugVl9v93RmjxF6LJzQLMLDdPaXCUIDPeOJ21Gbqi43xNKzBi/WQ0Q0dICqufzQbMjipQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/retry": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/retry": {
|
||||
"version": "0.12.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.5.tgz",
|
||||
"integrity": "sha512-3xSjTp3v03X/lSQLkczaN9UIEwJMoMCA1+Nb5HfbJEQWogdeQIyVtTvxPXDQjZ5zws8rFQfVfRdz03ARihPJgw==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/stack-utils": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz",
|
||||
@ -5980,6 +5999,18 @@
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/proper-lockfile": {
|
||||
"version": "4.1.2",
|
||||
"resolved": "https://registry.npmjs.org/proper-lockfile/-/proper-lockfile-4.1.2.tgz",
|
||||
"integrity": "sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"graceful-fs": "^4.2.4",
|
||||
"retry": "^0.12.0",
|
||||
"signal-exit": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/punycode": {
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
|
||||
@ -6135,6 +6166,16 @@
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/retry": {
|
||||
"version": "0.12.0",
|
||||
"resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz",
|
||||
"integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 4"
|
||||
}
|
||||
},
|
||||
"node_modules/reusify": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz",
|
||||
|
||||
@ -33,11 +33,13 @@
|
||||
"@actions/github": "^6.0.0",
|
||||
"@actions/io": "^1.1.3",
|
||||
"@actions/tool-cache": "^2.0.1",
|
||||
"proper-lockfile": "^4.1.2",
|
||||
"uuid": "^9.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/jest": "^29.5.12",
|
||||
"@types/node": "^24.1.0",
|
||||
"@types/proper-lockfile": "^4.1.4",
|
||||
"@types/uuid": "^9.0.8",
|
||||
"@typescript-eslint/eslint-plugin": "^7.9.0",
|
||||
"@typescript-eslint/parser": "^7.9.0",
|
||||
|
||||
@ -21,6 +21,7 @@ export interface IGitAuthHelper {
|
||||
configureSubmoduleAuth(): Promise<void>
|
||||
configureTempGlobalConfig(): Promise<string>
|
||||
removeAuth(): Promise<void>
|
||||
removeGlobalAuth(): Promise<void>
|
||||
removeGlobalConfig(): Promise<void>
|
||||
}
|
||||
|
||||
@ -235,6 +236,12 @@ class GitAuthHelper {
|
||||
await this.removeToken()
|
||||
}
|
||||
|
||||
async removeGlobalAuth(): Promise<void> {
|
||||
core.debug('Removing global auth entries')
|
||||
await this.git.tryConfigUnset('include.path', true)
|
||||
await this.git.tryConfigUnset(this.insteadOfKey, true)
|
||||
}
|
||||
|
||||
async removeGlobalConfig(): Promise<void> {
|
||||
if (this.temporaryHomePath?.length > 0) {
|
||||
core.debug(`Unsetting HOME override`)
|
||||
|
||||
98
src/git-cache-helper.ts
Normal file
98
src/git-cache-helper.ts
Normal file
@ -0,0 +1,98 @@
|
||||
import * as core from '@actions/core'
|
||||
import * as path from 'path'
|
||||
import * as fs from 'fs'
|
||||
import * as crypto from 'crypto'
|
||||
import * as lockfile from 'proper-lockfile'
|
||||
import {IGitCommandManager} from './git-command-manager'
|
||||
|
||||
export class GitCacheHelper {
|
||||
constructor(private referenceCache: string) {}
|
||||
|
||||
/**
|
||||
* Prepares the reference cache for a given repository URL.
|
||||
* If the cache does not exist, it performs a bare clone.
|
||||
* If it exists, it performs a fetch to update it.
|
||||
* Returns the absolute path to the bare cache repository.
|
||||
*/
|
||||
async setupCache(git: IGitCommandManager, repositoryUrl: string): Promise<string> {
|
||||
const cacheDirName = this.generateCacheDirName(repositoryUrl)
|
||||
const cachePath = path.join(this.referenceCache, cacheDirName)
|
||||
|
||||
// Ensure the base cache directory exists before we try to lock inside it
|
||||
if (!fs.existsSync(this.referenceCache)) {
|
||||
await fs.promises.mkdir(this.referenceCache, { recursive: true })
|
||||
}
|
||||
|
||||
// We use a dedicated lock dir specifically for this repository's cache
|
||||
// since we cannot place a lock *inside* a repository that might not exist yet
|
||||
const lockfilePath = `${cachePath}.lock`
|
||||
|
||||
// Ensure the file we are locking exists
|
||||
if (!fs.existsSync(lockfilePath)) {
|
||||
await fs.promises.writeFile(lockfilePath, '')
|
||||
}
|
||||
|
||||
core.debug(`Acquiring lock for ${repositoryUrl} at ${lockfilePath}`)
|
||||
|
||||
let releaseLock: () => Promise<void>
|
||||
try {
|
||||
// proper-lockfile creates a ".lock" directory next to the target file.
|
||||
// We configure it to wait up to 10 minutes (600,000 ms) for another process to finish.
|
||||
// E.g. cloning a very large monorepo might take minutes.
|
||||
releaseLock = await lockfile.lock(lockfilePath, {
|
||||
retries: {
|
||||
retries: 60, // try 60 times
|
||||
factor: 1, // linear backoff
|
||||
minTimeout: 10000, // wait 10 seconds between tries
|
||||
maxTimeout: 10000, // (total max wait time: 600s = 10m)
|
||||
randomize: true
|
||||
}
|
||||
})
|
||||
core.debug(`Lock acquired.`)
|
||||
} catch (err) {
|
||||
throw new Error(`Failed to acquire lock for repository cache ${repositoryUrl}: ${err}`)
|
||||
}
|
||||
|
||||
try {
|
||||
if (fs.existsSync(path.join(cachePath, 'objects'))) {
|
||||
core.info(`Reference cache for ${repositoryUrl} exists. Updating...`)
|
||||
const args = ['-C', cachePath, 'fetch', '--force', '--prune', '--tags', 'origin', '+refs/heads/*:refs/heads/*']
|
||||
await git.execGit(args)
|
||||
} else {
|
||||
core.info(`Reference cache for ${repositoryUrl} does not exist. Cloning --bare...`)
|
||||
|
||||
// Use a temporary clone pattern to prevent corrupted repos if process is killed mid-clone
|
||||
const tmpPath = `${cachePath}.tmp.${crypto.randomUUID()}`
|
||||
try {
|
||||
const args = ['-C', this.referenceCache, 'clone', '--bare', repositoryUrl, tmpPath]
|
||||
await git.execGit(args)
|
||||
|
||||
if (fs.existsSync(cachePath)) {
|
||||
// In rare cases where it somehow exists but objects/ didn't, clean it up
|
||||
await fs.promises.rm(cachePath, { recursive: true, force: true })
|
||||
}
|
||||
await fs.promises.rename(tmpPath, cachePath)
|
||||
} catch (cloneErr) {
|
||||
// Cleanup partial clone if an error occurred
|
||||
await fs.promises.rm(tmpPath, { recursive: true, force: true }).catch(() => {})
|
||||
throw cloneErr
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
await releaseLock()
|
||||
}
|
||||
|
||||
return cachePath
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a directory name for the cache based on the URL.
|
||||
* Replaces non-alphanumeric characters with underscores
|
||||
* and appends a short SHA256 hash of the original URL.
|
||||
*/
|
||||
generateCacheDirName(url: string): string {
|
||||
const cleanUrl = url.replace(/[^a-zA-Z0-9]/g, '_')
|
||||
const hash = crypto.createHash('sha256').update(url).digest('hex').substring(0, 8)
|
||||
return `${cleanUrl}_${hash}.git`
|
||||
}
|
||||
}
|
||||
@ -15,6 +15,11 @@ import {GitVersion} from './git-version'
|
||||
export const MinimumGitVersion = new GitVersion('2.18')
|
||||
export const MinimumGitSparseCheckoutVersion = new GitVersion('2.28')
|
||||
|
||||
/**
 * Result of a git invocation: the captured stdout text and the
 * process exit code (0 on success).
 */
export class GitOutput {
  stdout = ''
  exitCode = 0
}
|
||||
|
||||
export interface IGitCommandManager {
|
||||
branchDelete(remote: boolean, branch: string): Promise<void>
|
||||
branchExists(remote: boolean, pattern: string): Promise<boolean>
|
||||
@ -48,6 +53,7 @@ export interface IGitCommandManager {
|
||||
lfsFetch(ref: string): Promise<void>
|
||||
lfsInstall(): Promise<void>
|
||||
log1(format?: string): Promise<string>
|
||||
referenceAdd(referenceObjects: string): Promise<void>
|
||||
remoteAdd(remoteName: string, remoteUrl: string): Promise<void>
|
||||
removeEnvironmentVariable(name: string): void
|
||||
revParse(ref: string): Promise<string>
|
||||
@ -80,6 +86,12 @@ export interface IGitCommandManager {
|
||||
): Promise<string[]>
|
||||
tryReset(): Promise<boolean>
|
||||
version(): Promise<GitVersion>
|
||||
execGit(
|
||||
args: string[],
|
||||
allowAllExitCodes?: boolean,
|
||||
silent?: boolean,
|
||||
customListeners?: any
|
||||
): Promise<GitOutput>
|
||||
}
|
||||
|
||||
export async function createCommandManager(
|
||||
@ -401,6 +413,32 @@ class GitCommandManager {
|
||||
await this.execGit(['remote', 'add', remoteName, remoteUrl])
|
||||
}
|
||||
|
||||
async referenceAdd(referenceObjects: string): Promise<void> {
|
||||
const alternatesPath = path.join(
|
||||
this.workingDirectory,
|
||||
'.git',
|
||||
'objects',
|
||||
'info',
|
||||
'alternates'
|
||||
)
|
||||
core.info(`Configuring git alternate to reference objects at ${referenceObjects}`)
|
||||
const infoDir = path.dirname(alternatesPath)
|
||||
if (!fs.existsSync(infoDir)) {
|
||||
await fs.promises.mkdir(infoDir, { recursive: true })
|
||||
}
|
||||
|
||||
let existing = ''
|
||||
if (fs.existsSync(alternatesPath)) {
|
||||
existing = (await fs.promises.readFile(alternatesPath, 'utf8')).trim()
|
||||
}
|
||||
|
||||
const lines = existing ? existing.split('\n') : []
|
||||
if (!lines.includes(referenceObjects)) {
|
||||
lines.push(referenceObjects)
|
||||
await fs.promises.writeFile(alternatesPath, lines.join('\n') + '\n')
|
||||
}
|
||||
}
|
||||
|
||||
removeEnvironmentVariable(name: string): void {
|
||||
delete this.gitEnv[name]
|
||||
}
|
||||
@ -609,7 +647,7 @@ class GitCommandManager {
|
||||
return result
|
||||
}
|
||||
|
||||
private async execGit(
|
||||
async execGit(
|
||||
args: string[],
|
||||
allowAllExitCodes = false,
|
||||
silent = false,
|
||||
@ -746,7 +784,3 @@ class GitCommandManager {
|
||||
}
|
||||
}
|
||||
|
||||
class GitOutput {
|
||||
stdout = ''
|
||||
exitCode = 0
|
||||
}
|
||||
|
||||
@ -14,6 +14,182 @@ import {
|
||||
IGitCommandManager
|
||||
} from './git-command-manager'
|
||||
import {IGitSourceSettings} from './git-source-settings'
|
||||
import {GitCacheHelper} from './git-cache-helper'
|
||||
import * as fs from 'fs'
|
||||
|
||||
/**
 * A single parsed entry from .gitmodules: the submodule's config name
 * plus its checkout path and remote URL.
 */
interface SubmoduleInfo {
  name: string
  // Worktree-relative path of the submodule checkout
  path: string
  // Remote URL as written in .gitmodules (may be relative, e.g. '../repo')
  url: string
}
|
||||
|
||||
export async function setupReferenceCache(
|
||||
git: IGitCommandManager,
|
||||
referenceCache: string,
|
||||
repositoryUrl: string
|
||||
): Promise<void> {
|
||||
if (!referenceCache) {
|
||||
return
|
||||
}
|
||||
|
||||
core.startGroup('Setting up reference repository cache')
|
||||
try {
|
||||
const cacheHelper = new GitCacheHelper(referenceCache)
|
||||
const cachePath = await cacheHelper.setupCache(git, repositoryUrl)
|
||||
const cacheObjects = path.join(cachePath, 'objects')
|
||||
if (fsHelper.directoryExistsSync(cacheObjects, false)) {
|
||||
await git.referenceAdd(cacheObjects)
|
||||
} else {
|
||||
core.warning(
|
||||
`Reference repository cache objects directory ${cacheObjects} does not exist`
|
||||
)
|
||||
}
|
||||
} finally {
|
||||
core.endGroup()
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Walks .gitmodules in `repositoryPath` and initializes each submodule with
 * `--reference` pointing at a bare cache prepared by `cacheHelper`. Falls
 * back to a plain `submodule update` when an absolute URL cannot be resolved
 * or the cached update fails, and recurses into each checked-out submodule
 * when `nestedSubmodules` is true.
 */
async function recursiveSubmoduleUpdate(
  git: IGitCommandManager,
  cacheHelper: GitCacheHelper,
  repositoryPath: string,
  fetchDepth: number,
  nestedSubmodules: boolean
): Promise<void> {
  // Nothing to do for repositories without submodules
  const gitmodulesPath = path.join(repositoryPath, '.gitmodules')
  if (!fs.existsSync(gitmodulesPath)) {
    return
  }

  const submodules = new Map<string, SubmoduleInfo>()

  // Get all submodule config keys
  try {
    const output = await git.execGit([
      '-C', repositoryPath,
      'config', '--file', gitmodulesPath, '--get-regexp', 'submodule\\..*'
    ], true, true)

    // Each line looks like "submodule.<name>.<key> <value>"; collect the
    // path and url keys per submodule name
    const lines = output.stdout.split('\n').filter(l => l.trim().length > 0)
    for (const line of lines) {
      const match = line.match(/^submodule\.(.+?)\.(path|url)\s+(.*)$/)
      if (match) {
        const [, name, key, value] = match
        if (!submodules.has(name)) {
          submodules.set(name, { name, path: '', url: '' })
        }
        const info = submodules.get(name)!
        if (key === 'path') info.path = value
        if (key === 'url') info.url = value
      }
    }
  } catch (err) {
    // Unparseable .gitmodules: warn and skip cache-assisted updates entirely
    core.warning(`Failed to read .gitmodules: ${err}`)
    return
  }

  for (const info of submodules.values()) {
    // Entries missing either key cannot be updated individually
    if (!info.path || !info.url) continue

    core.info(`Processing submodule ${info.name} at ${info.path}`)

    // Resolve relative URLs or valid URLs
    let subUrl = info.url
    if (subUrl.startsWith('../') || subUrl.startsWith('./')) {
      // In checkout action, relative URLs are handled automatically by git.
      // But for our bare cache clone, we need an absolute URL.
      let originUrl = ''
      try {
        const originOut = await git.execGit(['-C', repositoryPath, 'remote', 'get-url', 'origin'], true, true)
        if (originOut.exitCode === 0) {
          originUrl = originOut.stdout.trim()
        }

        if (originUrl) {
          try {
            if (originUrl.match(/^https?:\/\//)) {
              // Using Node's URL class to resolve relative paths for HTTP(s)
              // NOTE(review): resolution is relative to the origin URL minus
              // its .git suffix — presumably matching git's own relative-URL
              // rules; confirm against gitmodules(5)
              const parsedOrigin = new URL(originUrl.replace(/\.git$/, ''))
              const resolvedUrl = new URL(subUrl, parsedOrigin.href + '/')
              subUrl = resolvedUrl.href
            } else {
              // Fallback for SSH URLs which new URL() cannot parse (e.g. git@github.com:org/repo)
              let originParts = originUrl.replace(/\.git$/, '').split('/')
              originParts.pop() // remove current repo

              // Handle multiple ../
              let subTarget = subUrl
              while (subTarget.startsWith('../')) {
                if (originParts.length === 0) break // Can't go higher
                originParts.pop()
                subTarget = subTarget.substring(3)
              }
              if (subTarget.startsWith('./')) {
                subTarget = subTarget.substring(2)
              }

              if (originParts.length > 0) {
                subUrl = originParts.join('/') + '/' + subTarget
              }
            }
          } catch {
            // Fallback does not work
          }
        }
      } catch {
        // ignore
      }
    }

    // Still relative (or empty) after resolution: use the standard path
    if (!subUrl || subUrl.startsWith('../') || subUrl.startsWith('./')) {
      core.warning(`Could not resolve absolute URL for submodule ${info.name}. Falling back to standard clone.`)
      await invokeStandardSubmoduleUpdate(git, repositoryPath, fetchDepth, info.path)
      continue
    }

    try {
      // Prepare cache
      const cachePath = await cacheHelper.setupCache(git, subUrl)

      // Submodule update for this specific one
      const args = ['-C', repositoryPath, '-c', 'protocol.version=2', 'submodule', 'update', '--init', '--force']
      if (fetchDepth > 0) {
        args.push(`--depth=${fetchDepth}`)
      }
      args.push('--reference', cachePath)
      args.push(info.path)

      // allowAllExitCodes=true so we can fall back instead of throwing raw
      const output = await git.execGit(args, true)
      if (output.exitCode !== 0) {
        throw new Error(`Submodule update failed with exit code ${output.exitCode}`)
      }
    } catch (err) {
      // Any cache-path failure degrades gracefully to an uncached update
      core.warning(`Reference cache failed for submodule ${info.name} (${err}). Falling back to standard clone...`)
      await invokeStandardSubmoduleUpdate(git, repositoryPath, fetchDepth, info.path)
    }

    // Recursive update inside the submodule
    if (nestedSubmodules) {
      const subRepoPath = path.join(repositoryPath, info.path)
      await recursiveSubmoduleUpdate(
        git,
        cacheHelper,
        subRepoPath,
        fetchDepth,
        nestedSubmodules
      )
    }
  }
}
|
||||
|
||||
async function invokeStandardSubmoduleUpdate(git: IGitCommandManager, repositoryPath: string, fetchDepth: number, submodulePath: string) {
|
||||
const args = ['-C', repositoryPath, '-c', 'protocol.version=2', 'submodule', 'update', '--init', '--force']
|
||||
if (fetchDepth > 0) {
|
||||
args.push(`--depth=${fetchDepth}`)
|
||||
}
|
||||
args.push(submodulePath)
|
||||
await git.execGit(args)
|
||||
}
|
||||
|
||||
export async function getSource(settings: IGitSourceSettings): Promise<void> {
|
||||
// Repository URL
|
||||
@ -105,6 +281,19 @@ export async function getSource(settings: IGitSourceSettings): Promise<void> {
|
||||
// Save state for POST action
|
||||
stateHelper.setRepositoryPath(settings.repositoryPath)
|
||||
|
||||
// If we didn't initialize it above, do it now
|
||||
if (!authHelper) {
|
||||
authHelper = gitAuthHelper.createAuthHelper(git, settings)
|
||||
}
|
||||
|
||||
// Check if we need global auth setup early for reference cache
|
||||
// Global auth does not require a local .git directory
|
||||
if (settings.referenceCache) {
|
||||
core.startGroup('Setting up global auth for reference cache')
|
||||
await authHelper.configureGlobalAuth()
|
||||
core.endGroup()
|
||||
}
|
||||
|
||||
// Initialize the repository
|
||||
if (
|
||||
!fsHelper.directoryExistsSync(path.join(settings.repositoryPath, '.git'))
|
||||
@ -115,6 +304,21 @@ export async function getSource(settings: IGitSourceSettings): Promise<void> {
|
||||
core.endGroup()
|
||||
}
|
||||
|
||||
await setupReferenceCache(git, settings.referenceCache, repositoryUrl)
|
||||
|
||||
// Remove global auth if it was set for reference cache,
|
||||
// to avoid duplicate AUTHORIZATION headers during fetch
|
||||
if (settings.referenceCache) {
|
||||
core.startGroup('Removing global auth after reference cache setup')
|
||||
await authHelper.removeGlobalAuth()
|
||||
core.endGroup()
|
||||
}
|
||||
|
||||
// Configure auth (must happen after git init so .git exists)
|
||||
core.startGroup('Setting up auth')
|
||||
await authHelper.configureAuth()
|
||||
core.endGroup()
|
||||
|
||||
// Disable automatic garbage collection
|
||||
core.startGroup('Disabling automatic garbage collection')
|
||||
if (!(await git.tryDisableAutomaticGarbageCollection())) {
|
||||
@ -124,15 +328,6 @@ export async function getSource(settings: IGitSourceSettings): Promise<void> {
|
||||
}
|
||||
core.endGroup()
|
||||
|
||||
// If we didn't initialize it above, do it now
|
||||
if (!authHelper) {
|
||||
authHelper = gitAuthHelper.createAuthHelper(git, settings)
|
||||
}
|
||||
// Configure auth
|
||||
core.startGroup('Setting up auth')
|
||||
await authHelper.configureAuth()
|
||||
core.endGroup()
|
||||
|
||||
// Determine the default branch
|
||||
if (!settings.ref && !settings.commit) {
|
||||
core.startGroup('Determining the default branch')
|
||||
@ -154,6 +349,10 @@ export async function getSource(settings: IGitSourceSettings): Promise<void> {
|
||||
await git.lfsInstall()
|
||||
}
|
||||
|
||||
// When using reference cache, fetch-depth > 0 is counterproductive:
|
||||
// objects are served from the local cache, so shallow negotiation only adds latency.
|
||||
adjustFetchDepthForCache(settings)
|
||||
|
||||
// Fetch
|
||||
core.startGroup('Fetching the repository')
|
||||
const fetchOptions: {
|
||||
@ -264,7 +463,21 @@ export async function getSource(settings: IGitSourceSettings): Promise<void> {
|
||||
// Checkout submodules
|
||||
core.startGroup('Fetching submodules')
|
||||
await git.submoduleSync(settings.nestedSubmodules)
|
||||
await git.submoduleUpdate(settings.fetchDepth, settings.nestedSubmodules)
|
||||
|
||||
if (settings.referenceCache) {
|
||||
core.info('Recursive submodule update using reference cache')
|
||||
const cacheHelper = new GitCacheHelper(settings.referenceCache)
|
||||
await recursiveSubmoduleUpdate(
|
||||
git,
|
||||
cacheHelper,
|
||||
settings.repositoryPath,
|
||||
settings.fetchDepth,
|
||||
settings.nestedSubmodules
|
||||
)
|
||||
} else {
|
||||
await git.submoduleUpdate(settings.fetchDepth, settings.nestedSubmodules)
|
||||
}
|
||||
|
||||
await git.submoduleForeach(
|
||||
'git config --local gc.auto 0',
|
||||
settings.nestedSubmodules
|
||||
@ -373,3 +586,30 @@ async function getGitCommandManager(
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adjusts fetchDepth when reference-cache is active.
|
||||
* Shallow fetches are counterproductive with a local cache because
|
||||
* objects are served from disk, making shallow negotiation pure overhead.
|
||||
*/
|
||||
export function adjustFetchDepthForCache(
|
||||
settings: Pick<
|
||||
IGitSourceSettings,
|
||||
'referenceCache' | 'fetchDepth' | 'fetchDepthExplicit'
|
||||
>
|
||||
): void {
|
||||
if (settings.referenceCache && settings.fetchDepth > 0) {
|
||||
if (settings.fetchDepthExplicit) {
|
||||
core.warning(
|
||||
`'fetch-depth: ${settings.fetchDepth}' is set with reference-cache enabled. ` +
|
||||
`This may slow down checkout because shallow negotiation bypasses the local cache. ` +
|
||||
`Consider using 'fetch-depth: 0' for best performance with reference-cache.`
|
||||
)
|
||||
} else {
|
||||
core.info(
|
||||
`Overriding fetch-depth from ${settings.fetchDepth} to 0 because reference-cache is enabled`
|
||||
)
|
||||
settings.fetchDepth = 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -49,6 +49,11 @@ export interface IGitSourceSettings {
|
||||
*/
|
||||
fetchDepth: number
|
||||
|
||||
/**
|
||||
* Whether fetch-depth was explicitly set by the user
|
||||
*/
|
||||
fetchDepthExplicit: boolean
|
||||
|
||||
/**
|
||||
* Fetch tags, even if fetchDepth > 0 (default: false)
|
||||
*/
|
||||
@ -59,6 +64,11 @@ export interface IGitSourceSettings {
|
||||
*/
|
||||
showProgress: boolean
|
||||
|
||||
/**
|
||||
* The path to a local directory used as a reference cache for Git clones
|
||||
*/
|
||||
referenceCache: string
|
||||
|
||||
/**
|
||||
* Indicates whether to fetch LFS objects
|
||||
*/
|
||||
|
||||
@ -102,7 +102,9 @@ export async function getInputs(): Promise<IGitSourceSettings> {
|
||||
'TRUE'
|
||||
|
||||
// Fetch depth
|
||||
result.fetchDepth = Math.floor(Number(core.getInput('fetch-depth') || '1'))
|
||||
const fetchDepthInput = core.getInput('fetch-depth')
|
||||
result.fetchDepthExplicit = fetchDepthInput !== ''
|
||||
result.fetchDepth = Math.floor(Number(fetchDepthInput || '1'))
|
||||
if (isNaN(result.fetchDepth) || result.fetchDepth < 0) {
|
||||
result.fetchDepth = 0
|
||||
}
|
||||
@ -161,5 +163,9 @@ export async function getInputs(): Promise<IGitSourceSettings> {
|
||||
result.githubServerUrl = core.getInput('github-server-url')
|
||||
core.debug(`GitHub Host URL = ${result.githubServerUrl}`)
|
||||
|
||||
// Reference Cache
|
||||
result.referenceCache = core.getInput('reference-cache')
|
||||
core.debug(`Reference Cache = ${result.referenceCache}`)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
Loading…
Reference in New Issue
Block a user