diff --git a/src/scm.ts b/src/scm.ts index 9c630e4..1077205 100644 --- a/src/scm.ts +++ b/src/scm.ts @@ -62,7 +62,7 @@ type GitlabChange = { generated_file: boolean | null; }; -abstract class BaseScmAdapter { +export abstract class BaseScmAdapter { hostInfo: HostInfo; constructor(hostInfo: HostInfo) { this.hostInfo = hostInfo; @@ -96,7 +96,7 @@ abstract class BaseScmAdapter { parsedUrl = new URL(url); // eslint-disable-next-line @typescript-eslint/no-unused-vars } catch (error) { - throw new Error(`Not avalid URL: ${url}`); + throw new Error(`Not a valid URL: ${url}`); } const commitInfo = this.testCommit(parsedUrl); @@ -347,7 +347,7 @@ class Github extends BaseScmAdapter { }); if (!response.ok) { throw new Error( - `Failed to fetch paginated data (page ${page}): ${response.statusText}`, + `Failed to retrieve paginated data (page ${page}): ${response.statusText}`, ); } itemsOnPage = await response.json(); @@ -432,7 +432,7 @@ class Github extends BaseScmAdapter { if (!response.ok) { throw new Error( - `Failed to retrieve commit details: ${response.statusText}`, + `Failed to retrieve pull details: ${response.statusText}`, ); } const info = await response.json(); @@ -545,7 +545,7 @@ class Gitlab extends BaseScmAdapter { }); if (!response.ok) { throw new Error( - `Failed to fetch paginated data (page ${page}): [${response.status}] ${response.statusText}`, + `Failed to retrieve paginated data (page ${page}): [${response.status}] ${response.statusText}`, ); } if (page === 1) { diff --git a/test/scm.details.test.ts b/test/scm.details.test.ts new file mode 100644 index 0000000..411dc23 --- /dev/null +++ b/test/scm.details.test.ts @@ -0,0 +1,251 @@ +import expect from 'expect.js'; +import { createScmAdaptersForTests, globalWithFetch } from './utils.js'; +const { gh, gl } = createScmAdaptersForTests(); + +function mockGithubPullFetch(fileCount = 120) { + globalWithFetch.fetch = (input: RequestInfo | { url: string }) => { + const url = typeof input === 'string' ? input : input.url; + if ( + url.startsWith('https://api.github.com/repos/foo/bar/pulls/1') && + !url.includes('/files') + ) { + return Promise.resolve( + new Response( + JSON.stringify({ + base: { sha: 'baseSha' }, + head: { sha: 'headSha' }, + }), + { status: 200 }, + ), + ); + } + if (url.includes('/repos/foo/bar/pulls/1/files')) { + const page = Number(new URL(url).searchParams.get('page')) || 1; + if (fileCount <= 100) { + // Only one page + const batch = Array(fileCount).fill({ + filename: `f1.pkg`, + previous_filename: `f1.pkg`, + additions: 1, + deletions: 0, + status: 'modified', + sha: 'h', + blob_url: '', + raw_url: '', + content_url: '', + }); + return Promise.resolve( + new Response(JSON.stringify(batch), { status: 200 }), + ); + } else { + // Pagination: first page 100, second page fileCount-100 + const count = page === 1 ? 
100 : fileCount - 100; + const batch = Array(count).fill({ + filename: `f${page}.pkg`, + previous_filename: `f${page}.pkg`, + additions: 1, + deletions: 0, + status: 'modified', + sha: 'h', + blob_url: '', + raw_url: '', + content_url: '', + }); + return Promise.resolve( + new Response(JSON.stringify(batch), { status: 200 }), + ); + } + } + return Promise.reject(new Error('Unexpected fetch: ' + url)); + }; +} + +const GITLAB_FILE_BASE = { + diff: '+a\n-b', + + new_file: false, + renamed_file: false, + deleted_file: false, + generated_file: null, +}; + +function mockGitlabCommitFetch(fileCount = 120) { + globalWithFetch.fetch = (input: RequestInfo | { url: string }) => { + const url = typeof input === 'string' ? input : input.url; + + if ( + url === + 'https://gitlab.com/api/v4/projects/foo%2Fbar/repository/commits/123abc' + ) { + return Promise.resolve( + new Response(JSON.stringify({ parent_ids: ['p1'] }), { + status: 200, + }), + ); + } + if ( + url.startsWith( + 'https://gitlab.com/api/v4/projects/foo%2Fbar/repository/commits/123abc/diff', + ) + ) { + const page = Number(new URL(url).searchParams.get('page')) || 1; + const gitlabFile = { + ...GITLAB_FILE_BASE, + new_path: `${page}.pkg`, + old_path: `${page}.pkg`, + }; + if (fileCount <= 100) { + const batch = Array(fileCount).fill(gitlabFile); + const headers = new Headers({ 'x-total-pages': '1' }); + return Promise.resolve( + new Response(JSON.stringify(batch), { status: 200, headers }), + ); + } else { + const count = page === 1 ? 100 : fileCount - 100; + const batch = Array(count).fill(gitlabFile); + const headers = new Headers({ 'x-total-pages': '2' }); + return Promise.resolve( + new Response(JSON.stringify(batch), { status: 200, headers }), + ); + } + } + return Promise.reject(new Error('Unexpected fetch: ' + url)); + }; +} + +function mockGitlabPullFetch(fileCount = 120) { + globalWithFetch.fetch = (input: RequestInfo | { url: string }) => { + const url = typeof input === 'string' ? input : input.url; + const u = new URL(url); + + if ( + u.pathname.toLocaleLowerCase() === + '/api/v4/projects/foo%2fbar/merge_requests/1' && + !u.searchParams.has('page') + ) { + return Promise.resolve( + new Response( + JSON.stringify({ + diff_refs: { base_sha: 'baseSha', head_sha: 'headSha' }, + }), + { status: 200, headers: new Headers() }, + ), + ); + } + if ( + u.pathname.toLocaleLowerCase() === + '/api/v4/projects/foo%2fbar/merge_requests/1/diffs' && + u.searchParams.has('page') + ) { + const page = Number(u.searchParams.get('page')) || 1; + const gitlabFile = { + ...GITLAB_FILE_BASE, + new_path: `${page}.pkg`, + old_path: `${page}.pkg`, + }; + if (fileCount <= 100) { + const batch = Array(fileCount).fill(gitlabFile); + const headers = new Headers({ 'x-total-pages': '1' }); + return Promise.resolve( + new Response(JSON.stringify(batch), { status: 200, headers }), + ); + } else { + const count = page === 1 ? 
100 : fileCount - 100; + const batch = Array(count).fill(gitlabFile); + const headers = new Headers({ 'x-total-pages': '2' }); + return Promise.resolve( + new Response(JSON.stringify(batch), { status: 200, headers }), + ); + } + } + return Promise.reject(new Error('Unexpected fetch: ' + url)); + }; +} + +function clearFetchMock() { + delete globalWithFetch.fetch; +} + +describe('Commit and Pull Details', () => { + describe('GitHub Adapter', () => { + describe('getPullDetails()', () => { + const fakePullInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' }; + + afterEach(clearFetchMock); + + it('collects all files across pull request pages (pagination)', async () => { + mockGithubPullFetch(120); + const prData = await gh.getPullDetails(fakePullInfo, 'token'); + expect(prData.info.base.sha).to.equal('baseSha'); + expect(prData.info.head.sha).to.equal('headSha'); + expect(prData.files).to.have.length(120); + expect(prData.files[0].filename).to.equal('f1.pkg'); + expect(prData.files[119].filename).to.equal('f2.pkg'); + }); + + it('collects all files when only one page is returned (no pagination)', async () => { + mockGithubPullFetch(5); + const prData = await gh.getPullDetails(fakePullInfo, 'token'); + expect(prData.info.base.sha).to.equal('baseSha'); + expect(prData.info.head.sha).to.equal('headSha'); + expect(prData.files).to.have.length(5); + expect(prData.files[0].filename).to.equal('f1.pkg'); + expect(prData.files[4].filename).to.equal('f1.pkg'); + }); + }); + }); + + describe('GitLab Adapter', () => { + describe('getCommitDetails()', () => { + const fakeCommitInfo = { + owner: 'foo', + repo: 'bar', + commitHash: '123abc', + }; + + afterEach(clearFetchMock); + + it('collects all files across commit pages (pagination)', async () => { + mockGitlabCommitFetch(120); + const commitData = await gl.getCommitDetails(fakeCommitInfo, 'token'); + expect(commitData.sha).to.equal('123abc'); + expect(commitData.parents[0].sha).to.equal('p1'); + expect(commitData.files).to.have.length(120); + }); + + it('collects all files when only one page is returned (no pagination)', async () => { + mockGitlabCommitFetch(5); + const commitData = await gl.getCommitDetails(fakeCommitInfo, 'token'); + expect(commitData.sha).to.equal('123abc'); + expect(commitData.parents[0].sha).to.equal('p1'); + expect(commitData.files).to.have.length(5); + }); + }); + + describe('getPullDetails()', () => { + const fakePullInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' }; + + afterEach(clearFetchMock); + + it('collects all files across merge request pages (pagination)', async () => { + mockGitlabPullFetch(120); + const mrData = await gl.getPullDetails(fakePullInfo, 'token'); + expect(mrData.info.base.sha).to.equal('baseSha'); + expect(mrData.info.head.sha).to.equal('headSha'); + expect(mrData.files).to.have.length(120); + expect(mrData.files[0].filename).to.equal('1.pkg'); + expect(mrData.files[119].filename).to.equal('2.pkg'); + }); + + it('collects all files when only one page is returned (no pagination)', async () => { + mockGitlabPullFetch(5); + const mrData = await gl.getPullDetails(fakePullInfo, 'token'); + expect(mrData.info.base.sha).to.equal('baseSha'); + expect(mrData.info.head.sha).to.equal('headSha'); + expect(mrData.files).to.have.length(5); + expect(mrData.files[0].filename).to.equal('1.pkg'); + expect(mrData.files[4].filename).to.equal('1.pkg'); + }); + }); + }); +}); diff --git a/test/scm.download.test.ts b/test/scm.download.test.ts new file mode 100644 index 0000000..0a5a980 --- /dev/null +++ 
b/test/scm.download.test.ts @@ -0,0 +1,456 @@ +import { scmAdapters } from '../src/scm.js'; +import { ModifiedFile } from '../src/types.ts'; + +import expect from 'expect.js'; +import sinon, { SinonStub } from 'sinon'; +import browser from 'webextension-polyfill'; + +type DownloadDelta = { id: number; state: { current: string } }; +type DownloadDeltaCallback = (delta: DownloadDelta) => void; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let adapter: any; +let sandbox: sinon.SinonSandbox; +let downloadListeners: Array<DownloadDeltaCallback>; +let downloadsStub: { + download: SinonStub; + search: SinonStub; + onChanged: { + addListener: (cb: DownloadDeltaCallback) => void; + removeListener: SinonStub; + }; + erase: SinonStub; +}; +let tabsStub: { update: SinonStub }; +let createObjectURLStub: SinonStub; +let revokeObjectURLStub: SinonStub; + +interface GlobalWithFetchStub extends GlobalThis { + fetch?: SinonStub<[input: RequestInfo | { url: string }], Promise<Response>>; + Buffer: typeof Buffer; +} + +const globalWithFetchStub = globalThis as unknown as GlobalWithFetchStub; + +beforeEach(() => { + sandbox = sinon.createSandbox(); + downloadListeners = []; + + adapter = new scmAdapters.github({ host: 'github.com', scm: 'github' }); + + downloadsStub = { + download: sandbox.stub().resolves(123), + search: sandbox.stub().resolves([{ filename: '/tmp/file.ext' }]), + onChanged: { + addListener: (cb: DownloadDeltaCallback) => { + downloadListeners.push(cb); + }, + removeListener: sandbox.stub(), + }, + erase: sandbox.stub().resolves(undefined), + }; + tabsStub = { + update: sandbox.stub().resolves(undefined), + }; + + sandbox.stub(browser, 'downloads').get(() => downloadsStub); + sandbox.stub(browser, 'tabs').get(() => tabsStub); + + globalWithFetchStub.fetch = sandbox.stub(); + + createObjectURLStub = sandbox + .stub(URL, 'createObjectURL') + .returns('blob://fake'); + revokeObjectURLStub = sandbox + .stub(URL, 'revokeObjectURL') + .callsFake(() => {}); + + globalWithFetchStub.Buffer = Buffer; +}); + +afterEach(() => { + sandbox.restore(); +}); + +describe('Download helpers', () => { + describe('calcShortHash()', () => { + it('returns first 8 characters', () => { + const res = adapter['calcShortHash']('abcdefghijklmnop'); + expect(res).to.equal('abcdefgh'); + }); + }); + + describe('downloadDummy()', () => { + it('calls doDownload with correct data URL and filename', async () => { + const ddSpy = sandbox + .stub(adapter, 'doDownload') + .resolves('/tmp/diff/file.ext'); + const out = await adapter['downloadDummy']('path/to/file.ext', '.X'); + expect( + ddSpy.calledWith( + 'data:text/ext;charset=utf-8,', + 'diff/file/file.X.ext', + ), + ).to.equal(true); + expect(out).to.equal('/tmp/diff/file.ext'); + }); + + it('uses correct mime type', async () => { + const spy = sandbox.stub(adapter, 'doDownload'); + await adapter['downloadDummy']('some/path/file.ext', '.X'); + expect( + spy.calledWith('data:text/ext;charset=utf-8,', 'diff/file/file.X.ext'), + ).to.be(true); + }); + }); + + describe('doDownload()', () => { + it('resolves when download completes and removes listener', async () => { + const promise = adapter['doDownload']('url', 'file.ext'); + await Promise.resolve(); + + const downloadStub = downloadsStub.download as SinonStub; + expect( + downloadStub.calledWith({ + url: 'url', + filename: 'file.ext', + conflictAction: 'overwrite', + }), + ).to.equal(true); + + downloadListeners.forEach((cb) => + cb({ id: 123, state: { current: 'complete' } }), + ); + + const filename = await promise; + 
expect(filename).to.equal('/tmp/file.ext'); + + expect(downloadsStub.onChanged.removeListener.called).to.equal(true); + expect(downloadsStub.erase.calledWith({ id: 123 })).to.equal(true); + }); + + it('throws if downloadId is undefined', async () => { + downloadsStub.download.resolves(undefined); + try { + await adapter['doDownload']('url', 'file.ext'); + expect().fail('Expected error not thrown'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.be('Failed to start download'); + } + }); + + it('throws if search returns no items', async () => { + const promise = adapter['doDownload']('url', 'file.ext'); + await Promise.resolve(); + downloadsStub.search.resolves([]); + downloadListeners.forEach((cb) => + cb({ id: 123, state: { current: 'complete' } }), + ); + try { + await promise; + expect().fail('Expected error not thrown'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.be('Failed to retrieve download item'); + } + }); + }); + + describe('doDownloadFile()', () => { + const apiUrl = 'https://api'; + const filename = 'dir/file.ext'; + const suffix = '.S'; + const token = 'token'; + const sha = 'abc123'; + const base64Content = Buffer.from('content').toString('base64'); + const fakeJson = { content: base64Content }; + + beforeEach(() => { + globalWithFetchStub.fetch = sandbox.stub().callsFake( + () => + Promise.resolve( + new Response(JSON.stringify(fakeJson), { + statusText: 'OK', + headers: new Headers(), + }), + ), + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ) as any; + }); + + it('handles JSON type with ObjectURL support', async () => { + sandbox.stub(adapter, 'doDownload').resolves('/out.ext'); + + const out = await adapter['doDownloadFile']( + apiUrl, + 'json', + filename, + suffix, + token, + sha, + ); + + // createHeaders is accessible here because adapter is typed as any + const headers = adapter.createHeaders(token); + expect( + globalWithFetchStub.fetch!.calledWith(apiUrl, { + headers, + }), + ).to.be(true); + expect(createObjectURLStub.called).to.be(true); + expect(revokeObjectURLStub.called).to.be(true); + expect(out).to.be('/out.ext'); + }); + + it('throws on a non-ok response', async () => { + globalWithFetchStub.fetch!.resolves( + new Response(null, { statusText: '404', status: 404 }), + ); + try { + await adapter['doDownloadFile']( + apiUrl, + 'json', + filename, + suffix, + token, + sha, + ); + expect().fail('Expected error not thrown'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.match(/Failed to fetch file dir\/file.ext/); + } + }); + + it('handles RAW type with ObjectURL support', async () => { + const blobUrl = 'blob:fake-object-url'; + createObjectURLStub.returns(blobUrl); + const contentBytes = Uint8Array.from(Buffer.from('content')); + globalWithFetchStub.fetch!.resolves( + new Response(new Blob([contentBytes]), { + statusText: 'OK', + headers: new Headers(), + }), + ); + const ddStub = sandbox + .stub(adapter, 'doDownload') + .resolves('/withObjectURL.ext'); + const out = await adapter['doDownloadFile']( + apiUrl, + 'raw', + 'dir/file.ext', + '.S', + token, + sha, + ); + const expectedName = 'diff/file/file.S.ext'; + expect(ddStub.calledWith(blobUrl, expectedName)).to.be(true); + expect(out).to.be('/withObjectURL.ext'); + }); + + it('handles RAW type fallback (no ObjectURL) by building correct data URI', async () => { + (URL.createObjectURL as SinonStub).restore(); + (URL.createObjectURL as unknown) = undefined; + + const 
contentBytes = Uint8Array.from(Buffer.from('content')); + globalWithFetchStub.fetch!.resolves( + new Response(new Blob([contentBytes]), { + status: 200, + statusText: 'OK', + headers: new Headers(), + }), + ); + const ddStub = sandbox + .stub(adapter, 'doDownload') + .resolves('/fallback.ext'); + const out = await adapter['doDownloadFile']( + apiUrl, + 'raw', + 'dir/file.ext', + '.S', + token, + sha, + ); + // "content" -> Base64 "Y29udGVudA==" -> URI‑encoded "Y29udGVudA%3D%3D" + const expectedDataUrl = 'data:text/ext;base64,Y29udGVudA%3D%3D'; + const expectedName = 'diff/file/file.S.ext'; + expect(ddStub.calledWith(expectedDataUrl, expectedName)).to.be(true); + expect(out).to.be('/fallback.ext'); + }); + + it('throws on unknown type', async () => { + try { + await adapter['doDownloadFile']( + apiUrl, + 'xml' as unknown, + filename, + suffix, + token, + sha, + ); + expect().fail('Expected error not thrown'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.be('Unknown download type: xml'); + } + }); + + it('builds data URI when URL.createObjectURL is unavailable', async () => { + (URL.createObjectURL as SinonStub).restore(); + (URL.createObjectURL as unknown) = undefined; + + const payload = Buffer.from('hello').toString('base64'); + globalWithFetchStub.fetch!.resolves( + new Response(JSON.stringify({ content: payload }), { + statusText: 'OK', + headers: new Headers(), + }), + ); + + const dd = sandbox.stub(adapter, 'doDownload').resolves('/jf.json'); + const out = await adapter['doDownloadFile']( + 'u', + 'json', + 'dir/foo.txt', + '.S', + 'T', + 'sha', + ); + + const expected = `data:text/plain;base64,${payload}`; + expect(dd.calledWith(expected, 'diff/foo/foo.S.txt')).to.be(true); + expect(out).to.be('/jf.json'); + }); + }); + + describe('downloadDiff()', () => { + it('combines doDownloadFile and downloadDummy and updates the tab', async () => { + const file: ModifiedFile = { + filename: 'file.ext', + filenameOld: 'file.ext', + new: false, + renamed: false, + deleted: true, + additions: 0, + deletions: 0, + shaOld: 'shaOld', + shaNew: 'shaNew', + download: { type: 'raw', old: 'url/old', new: 'url/new' }, + }; + + const ddFileStub = sandbox + .stub(adapter, 'doDownloadFile') + .resolves('/old.ext'); + const dummyStub = sandbox + .stub(adapter, 'downloadDummy') + .resolves('/new.ext'); + + await adapter.downloadDiff(file, 'token'); + + expect( + ddFileStub.calledWith( + 'url/old', + 'raw', + 'file.ext', + '.shaOld.old', + 'token', + 'shaOld', + ), + ).to.be(true); + expect(dummyStub.calledWith('file.ext', '.shaNew.new')).to.be(true); + + const expectedUrl = encodeURI( + 'tracetronic://diff?file1=/old.ext&file2=/new.ext&cleanup=True', + ); + expect(tabsStub.update.calledWith({ url: expectedUrl })).to.be(true); + }); + + it('uses downloadDummy for old file when new=true and doDownloadFile for new file when not deleted', async () => { + const file: ModifiedFile = { + filename: 'file.ext', + filenameOld: 'file.ext', + new: true, + renamed: false, + deleted: false, + additions: 0, + deletions: 0, + shaOld: 'shaOld', + shaNew: 'shaNew', + download: { type: 'raw', old: 'url/old', new: 'url/new' }, + }; + + const dummyStub = sandbox + .stub(adapter, 'downloadDummy') + .resolves('/old.ext'); + const ddFileStub = sandbox + .stub(adapter, 'doDownloadFile') + .resolves('/new.ext'); + + await adapter.downloadDiff(file, 'token'); + expect(dummyStub.calledWith('file.ext', '.shaOld.old')).to.be(true); + + expect( + ddFileStub.calledWith( + 'url/new', + 'raw', 
+ 'file.ext', + '.shaNew.new', + 'token', + 'shaNew', + ), + ).to.be(true); + const expectedUrl = encodeURI( + 'tracetronic://diff?file1=/old.ext&file2=/new.ext&cleanup=True', + ); + expect(tabsStub.update.calledWith({ url: expectedUrl })).to.be(true); + }); + }); + + describe('downloadFile()', () => { + it('calls doDownloadFile then updates the tab', async () => { + const file: ModifiedFile = { + filename: 'file.ext', + filenameOld: 'file.ext', + new: true, + renamed: false, + deleted: false, + additions: 0, + deletions: 0, + shaOld: 'shaOld', + shaNew: 'shaNew', + download: { type: 'json', old: 'url/old', new: 'url/new' }, + }; + + const ddFileStub = sandbox + .stub(adapter, 'doDownloadFile') + .resolves('/file.ext'); + await adapter.downloadFile(file, 'new', 'token'); + expect( + ddFileStub.calledWith( + 'url/new', + 'json', + 'file.ext', + '.shaNew.new', + 'token', + 'shaNew', + ), + ).to.be(true); + + await adapter.downloadFile(file, 'old', 'token'); + expect( + ddFileStub.calledWith( + 'url/old', + 'json', + 'file.ext', + '.shaOld.old', + 'token', + 'shaOld', + ), + ).to.be(true); + + const tabUrl = encodeURI('tracetronic:///' + '/file.ext'); + expect(tabsStub.update.calledWith({ url: tabUrl })).to.be(true); + }); + }); +}); diff --git a/test/scm.fetch-logic.test.ts b/test/scm.fetch-logic.test.ts new file mode 100644 index 0000000..06d1a1c --- /dev/null +++ b/test/scm.fetch-logic.test.ts @@ -0,0 +1,485 @@ +import { scmAdapters } from '../src/scm.js'; +import { ModifiedFile } from '../src/types.ts'; +import expect from 'expect.js'; +import sinon from 'sinon'; +import { createScmAdaptersForTests, globalWithFetch } from './utils.ts'; + +const { gh, gl } = createScmAdaptersForTests(); + +describe('Fetch Logic', () => { + describe('getCommitDetails()', () => { + context('GitHub', () => { + const fakeInfo = { owner: 'foo', repo: 'bar', commitHash: '123abc' }; + const token = 'tok'; + + beforeEach(() => { + sinon.stub(gh, 'getApiUrl').returns('https://api'); + sinon + .stub(gh, 'createHeaders') + .withArgs(token) + .returns({ Authorization: 'token tok' }); + }); + afterEach(() => sinon.restore()); + + it('returns parsed JSON when response.ok=true', async () => { + globalWithFetch.fetch = sinon.stub().resolves({ + ok: true, + statusText: 'OK', + json: async () => ({ sha: 'sha123', files: [] }), + }); + + const result = await gh.getCommitDetails(fakeInfo, token); + expect(result).to.eql({ sha: 'sha123', files: [] }); + }); + + it('throws if response.ok=false', async () => { + globalWithFetch.fetch = sinon.stub().resolves({ + ok: false, + statusText: 'Not Found', + }); + + try { + await gh.getCommitDetails(fakeInfo, token); + expect().fail('Expected error'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.match( + /Failed to retrieve commit details: Not Found/, + ); + } + }); + }); + + context('GitLab diff error', () => { + const fakeInfo = { owner: 'foo', repo: 'bar', commitHash: 'abc123' }; + const token = 'tok'; + + beforeEach(() => { + sinon.stub(gl, 'getApiUrl').returns('https://gitlab.com/api/v4'); + sinon + .stub(gl, 'createHeaders') + .withArgs(token) + .returns({ Authorization: 'Bearer tok' }); + }); + afterEach(() => sinon.restore()); + + it('throws if diff page 1 fetch fails', async () => { + globalWithFetch.fetch = sinon + .stub() + .onFirstCall() + .resolves({ ok: true, json: async () => ({ parent_ids: ['p1'] }) }) + .onSecondCall() + .resolves({ ok: false, status: 500, statusText: 'Internal Error' }); + + try { + await 
gl.getCommitDetails(fakeInfo, token); + expect().fail('Expected error'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.match(/\[500\] Internal Error/); + } + }); + }); + + context('GitLab nested project group', () => { + const token = 'token'; + const fakeInfo = { + owner: 'group/subgroup', + repo: 'project', + commitHash: 'abc123', + }; + + beforeEach(() => { + sinon.stub(gl, 'getApiUrl').returns('https://gitlab.com/api/v4'); + sinon + .stub(gl, 'createHeaders') + .withArgs(token) + .returns({ Authorization: 'Bearer token' }); + + sinon.replace( + globalThis, + 'fetch', + sinon + .stub() + .onFirstCall() + .resolves( + new Response(JSON.stringify({ parent_ids: ['p1'] }), { + status: 200, + headers: new Headers({ 'Content-Type': 'application/json' }), + }), + ) + .onSecondCall() + .resolves( + new Response( + JSON.stringify([ + { + diff: '+x\n-y', + new_path: 'file.pkg', + old_path: 'file.pkg', + new_file: false, + renamed_file: false, + deleted_file: false, + }, + ]), + { status: 200, headers: new Headers({ 'x-total-pages': '1' }) }, + ), + ), + ); + }); + + afterEach(() => sinon.restore()); + + it('handles commit from nested group project', async () => { + const result = await gl.getCommitDetails(fakeInfo, token); + expect(result).to.have.property('sha', 'abc123'); + expect(result.files).to.be.an('array'); + expect(result.files[0].filename).to.equal('file.pkg'); + }); + }); + }); + + describe('getPullDetails()', () => { + context('GitHub error paths', () => { + const token = 'token'; + const pullInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' }; + + beforeEach(() => { + sinon.stub(gh, 'getApiUrl').returns('https://api'); + sinon + .stub(gh, 'createHeaders') + .withArgs(token) + .returns({ Authorization: 'token token' }); + }); + afterEach(() => sinon.restore()); + + it('throws when first page of pull-files is non-ok', async () => { + globalWithFetch.fetch = sinon + .stub() + .onFirstCall() + .resolves({ ok: true, json: async () => ({}) }) + .onSecondCall() + .resolves({ ok: false, statusText: 'Bad Gateway' }); + + try { + await gh.getPullDetails(pullInfo, token); + expect().fail('Expected error'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.match( + /Failed to retrieve paginated data \(page 1\): Bad Gateway/, + ); + } + }); + + it('throws if initial pull metadata fetch fails', async () => { + globalWithFetch.fetch = sinon.stub().resolves({ + ok: false, + statusText: 'Not Found', + }); + + try { + await gh.getPullDetails(pullInfo, token); + expect().fail('Expected error not thrown'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.match( + /Failed to retrieve pull details: Not Found/, + ); + } + }); + }); + + context('GitLab MR diff error', () => { + const fakeInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' }; + const token = 'tok'; + + beforeEach(() => { + sinon.stub(gl, 'getApiUrl').returns('https://gitlab.com/api/v4'); + sinon + .stub(gl, 'createHeaders') + .withArgs(token) + .returns({ Authorization: 'Bearer tok' }); + }); + afterEach(() => sinon.restore()); + + it('throws if MR diff page 2 fetch fails', async () => { + globalWithFetch.fetch = (input: RequestInfo | { url: string }) => { + const urlStr = typeof input === 'string' ? 
input : input.url; + const u = new URL(urlStr); + + if ( + u.pathname.endsWith('/diffs') && + u.searchParams.get('page') === '1' + ) { + return Promise.resolve( + new Response( + JSON.stringify([ + { + diff: '+a\n-b', + new_path: 'f.pkg', + old_path: 'f.pkg', + new_file: false, + renamed_file: false, + deleted_file: false, + generated_file: null, + }, + ]), + { status: 200, headers: new Headers({ 'x-total-pages': '2' }) }, + ), + ); + } + + if ( + u.pathname.endsWith('/diffs') && + u.searchParams.get('page') === '2' + ) { + return Promise.resolve( + new Response(null, { status: 502, statusText: 'Bad Gateway' }), + ); + } + + return Promise.resolve( + new Response(null, { + status: 500, + statusText: 'Unexpected fetch ' + urlStr, + }), + ); + }; + + try { + await gl.getPullDetails(fakeInfo, token); + expect().fail('Expected error'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.match( + /Failed to retrieve paginated data \(page 2\): \[502\] Bad Gateway/, + ); + } + }); + + it('throws if final merge request metadata fetch fails', async () => { + globalWithFetch.fetch = (input: RequestInfo | { url: string }) => { + const url = typeof input === 'string' ? input : input.url; + const u = new URL(url); + if (u.pathname.endsWith('/diffs')) { + if (u.searchParams.get('page') === '1') { + return Promise.resolve( + new Response( + JSON.stringify([ + { + diff: '+a\n-b', + new_path: 'f.pkg', + old_path: 'f.pkg', + new_file: false, + renamed_file: false, + deleted_file: false, + generated_file: null, + }, + ]), + { + status: 200, + headers: new Headers({ 'x-total-pages': '2' }), + }, + ), + ); + } + return Promise.resolve( + new Response(JSON.stringify([]), { + status: 200, + headers: new Headers(), + }), + ); + } + + return Promise.resolve( + new Response(null, { + status: 404, + statusText: 'Not Found', + headers: new Headers(), + }), + ); + }; + + try { + await gl.getPullDetails(fakeInfo, token); + expect().fail('Expected error'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.match( + /Failed to retrieve merge request details: \[404\] Not Found/, + ); + } + }); + }); + }); + + describe('fetchModifiedFiles()', () => { + const cases = [ + { + name: 'GitHub', + Adapter: scmAdapters.github, + commitUrl: 'https://github.com/foo/bar/commit/123abc', + prUrl: 'https://github.com/foo/bar/pull/1', + hostInfo: { host: 'github.com', scm: 'github' as const }, + }, + { + name: 'GitLab', + Adapter: scmAdapters.gitlab, + commitUrl: 'https://gitlab.com/foo/bar/-/commit/123abc', + prUrl: 'https://gitlab.com/foo/bar/-/merge_requests/1', + hostInfo: { host: 'gitlab.com', scm: 'gitlab' as const }, + }, + ]; + + cases.forEach(({ name, Adapter, hostInfo, commitUrl, prUrl }) => { + describe(`${name} Adapter`, () => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let adapter: any; + let fakeFiles: ModifiedFile[]; + let stubCommit: sinon.SinonStub; + let stubPull: sinon.SinonStub; + + beforeEach(() => { + adapter = new Adapter(hostInfo); + fakeFiles = [ + { + filename: 'x.ts', + filenameOld: 'x.ts', + new: false, + renamed: false, + deleted: false, + additions: 1, + deletions: 0, + shaOld: 'o', + shaNew: 'n', + download: { type: 'json', old: 'o', new: 'n' }, + }, + ]; + stubCommit = sinon.stub(adapter, 'handleCommit').resolves(fakeFiles); + stubPull = sinon + .stub(adapter, 'handlePullRequest') + .resolves(fakeFiles); + }); + + afterEach(() => sinon.restore()); + + it('calls handleCommit for commit URLs', async 
() => { + const result = await adapter.fetchModifiedFiles(commitUrl, 'token'); + expect(stubCommit.calledOnce).to.equal(true); + expect(stubPull.notCalled).to.equal(true); + expect(result).to.eql(fakeFiles); + }); + + it('calls handlePullRequest for pull/merge request URLs', async () => { + const result = await adapter.fetchModifiedFiles(prUrl, 'token'); + expect(stubPull.calledOnce).to.equal(true); + expect(stubCommit.notCalled).to.equal(true); + expect(result).to.eql(fakeFiles); + }); + + it('throws on malformed URL', async () => { + try { + await adapter.fetchModifiedFiles('not-a-url', 'token'); + throw new Error('Promise did not reject'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.match(/Not a valid URL: not-a-url/); + } + }); + + it('throws if URL is neither commit nor pull/merge request', async () => { + try { + const badUrl = + name === 'GitHub' + ? 'https://github.com/foo/bar/issues/1' + : 'https://gitlab.com/foo/bar/-/issues/1'; + await adapter.fetchModifiedFiles(badUrl, 'token'); + throw new Error('Promise did not reject'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.match( + /Not a GitHub commit or pull request page/, + ); + } + }); + }); + }); + }); + + describe('test()', () => { + describe('GitHub Adapter', () => { + it('returns true when fetch.ok is true', async () => { + globalWithFetch.fetch = () => + Promise.resolve( + new Response(null, { status: 200, statusText: 'OK' }), + ); + const result = await gh.test('token'); + expect(result).to.equal(true); + }); + + it('returns false when fetch.ok is false', async () => { + globalWithFetch.fetch = () => + Promise.resolve( + new Response(null, { status: 400, statusText: 'Bad Request' }), + ); + const result = await gh.test('token'); + expect(result).to.equal(false); + }); + + it('returns false on network error', async () => { + const errStub = sinon.stub(console, 'error'); + globalWithFetch.fetch = () => Promise.reject(new Error()); + const result = await gh.test('token'); + expect(result).to.equal(false); + errStub.restore(); + }); + }); + + describe('GitLab Adapter', () => { + it('returns true on 200 OK', async () => { + globalWithFetch.fetch = () => + Promise.resolve( + new Response(null, { + status: 200, + headers: new Headers(), + }), + ); + const result = await gl.test('token'); + expect(result).to.equal(true); + }); + + it('returns true on 403 with x-gitlab-meta header', async () => { + globalWithFetch.fetch = () => + Promise.resolve( + new Response(null, { + status: 403, + headers: { 'x-gitlab-meta': 'yes' }, + }), + ); + const result = await gl.test('token'); + expect(result).to.equal(true); + }); + + it('returns false on 403 without x-gitlab-meta header', async () => { + globalWithFetch.fetch = () => + Promise.resolve( + new Response(null, { + status: 403, + statusText: 'Forbidden', + headers: new Headers(), + }), + ); + const result = await gl.test('token'); + expect(result).to.equal(false); + }); + + it('returns false on network error', async () => { + const errStub = sinon.stub(console, 'error'); + globalWithFetch.fetch = () => Promise.reject(new Error()); + const result = await gl.test('token'); + expect(result).to.equal(false); + errStub.restore(); + }); + }); + }); +}); diff --git a/test/scm.gitlab-internals.test.ts b/test/scm.gitlab-internals.test.ts new file mode 100644 index 0000000..a0df1f9 --- /dev/null +++ b/test/scm.gitlab-internals.test.ts @@ -0,0 +1,70 @@ +import expect from 'expect.js'; +import sinon from 
'sinon'; +import { createScmAdaptersForTests, globalWithFetch } from './utils.js'; + +const { gl } = createScmAdaptersForTests(); + +describe('GitLab Adapter internals', () => { + it('parseStats() counts additions and deletions correctly', () => { + const stats = gl.parseStats('\n+a\n-b\n+c\n-d\n'); + expect(stats).to.eql({ additions: 2, deletions: 2 }); + }); + + it('parseStats() returns zeroes on empty diff', () => { + const stats = gl.parseStats(''); + expect(stats).to.eql({ additions: 0, deletions: 0 }); + }); + + it('processChanges() filters out unsupported filetypes', () => { + const raw = [ + { + diff: '+x\n-y', + new_path: 'keep.pkg', + old_path: 'keep.pkg', + new_file: false, + renamed_file: false, + deleted_file: false, + generated_file: null, + }, + { + diff: '+x\n-y', + new_path: 'skip.txt', + old_path: 'skip.txt', + new_file: false, + renamed_file: false, + deleted_file: false, + generated_file: null, + }, + ]; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const out = gl.processChanges(raw as any); + expect(out).to.have.length(1); + expect(out[0].filename).to.equal('keep.pkg'); + }); + + it('getCommitDetails throws on initial metadata fetch failure', async () => { + const fakeInfo = { owner: 'foo', repo: 'bar', commitHash: '123abc' }; + const token = 'tok'; + sinon.stub(gl, 'getApiUrl').returns('https://gitlab.com/api/v4'); + sinon + .stub(gl, 'createHeaders') + .withArgs(token) + .returns({ Authorization: 'Bearer tok' }); + + globalWithFetch.fetch = sinon.stub().resolves({ + ok: false, + status: 401, + statusText: 'Unauthorized', + }); + + try { + await gl.getCommitDetails(fakeInfo, token); + expect().fail('Expected error'); + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; + expect(err.message).to.match(/\[401\] Unauthorized/); + } finally { + sinon.restore(); + } + }); +}); diff --git a/test/scm.mapping.test.ts b/test/scm.mapping.test.ts new file mode 100644 index 0000000..725e12c --- /dev/null +++ b/test/scm.mapping.test.ts @@ -0,0 +1,251 @@ +import { ModifiedFile } from '../src/types.ts'; +import expect from 'expect.js'; +import sinon, { SinonStub } from 'sinon'; +import { createScmAdaptersForTests } from './utils.ts'; +const { gh, gl } = createScmAdaptersForTests(); + +describe('Mapping & Filtering (response files to internal files)', () => { + describe('handleCommit()', () => { + context('GitHub Adapter', () => { + const fakeCommitInfo = { + owner: 'foo', + repo: 'bar', + commitHash: 'abc123', + }; + const fakeApiResponse = { + sha: 'sha', + parents: [{ sha: 'parentSha' }], + files: [ + { + filename: 'keep.pkg', + previous_filename: 'oldKeep.pkg', + additions: 2, + deletions: 1, + status: 'renamed', + sha: 'sha', + blob_url: '', + raw_url: '', + content_url: '', + }, + { + filename: 'skip.txt', + previous_filename: 'skip.txt', + additions: 1, + deletions: 0, + status: 'modified', + sha: 'sha', + blob_url: '', + raw_url: '', + content_url: '', + }, + ], + }; + + beforeEach(() => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sinon.stub(gh, 'getCommitDetails').resolves(fakeApiResponse as any); + }); + afterEach(() => sinon.restore()); + + it('filters and maps commit files correctly', async () => { + const result = await gh.handleCommit(fakeCommitInfo, 'token'); + expect(result).to.have.length(1); + const mf: ModifiedFile = result[0]; + expect(mf.filename).to.equal('keep.pkg'); + expect(mf.filenameOld).to.equal('oldKeep.pkg'); + expect(mf.additions).to.equal(2); + expect(mf.deletions).to.equal(1); + 
expect(mf.renamed).to.equal(true); + expect(mf.shaOld).to.equal('parentSha'); + expect(mf.shaNew).to.equal('sha'); + expect(mf.download.new).to.match(/contents\/keep\.pkg\?ref=sha$/); + expect(mf.download.old).to.match( + /contents\/oldKeep\.pkg\?ref=parentSha$/, + ); + }); + + it('throws if commitData.files is missing or not an array', async () => { + (gh.getCommitDetails as SinonStub).restore(); + const stub = sinon + .stub(gh, 'getCommitDetails') + // eslint-disable-next-line @typescript-eslint/no-explicit-any + .resolves({ sha: 'sha', parents: [{ sha: 'parentSha' }] } as any); + try { + await gh.handleCommit(fakeCommitInfo, 'token'); + throw new Error('Promise did not reject'); + } catch (err) { + expect((err as Error).message).to.match(/Unable to retrieve modified files/); + } finally { + stub.restore(); + } + }); + }); + + context('GitLab Adapter', () => { + const fakeCommitInfo = { + owner: 'foo', + repo: 'bar', + commitHash: 'abc123', + }; + const fakeApiResponse = { + sha: 'sha', + parents: [{ sha: 'parentSha' }], + files: [ + { + filename: 'keep.pkg', + filenameOld: 'oldKeep.pkg', + new: false, + renamed: true, + deleted: false, + additions: 2, + deletions: 1, + }, + ], + }; + + beforeEach(() => { + sinon.stub(gl, 'getCommitDetails').resolves(fakeApiResponse); + }); + afterEach(() => sinon.restore()); + + it('processes stats correctly', async () => { + const result = await gl.handleCommit(fakeCommitInfo, 'token'); + expect(result).to.have.length(1); + const mf: ModifiedFile = result[0]; + expect(mf.filename).to.equal('keep.pkg'); + expect(mf.filenameOld).to.equal('oldKeep.pkg'); + expect(mf.additions).to.equal(2); + expect(mf.deletions).to.equal(1); + expect(mf.new).to.equal(false); + expect(mf.deleted).to.equal(false); + expect(mf.renamed).to.equal(true); + expect(mf.shaOld).to.equal('parentSha'); + expect(mf.shaNew).to.equal('sha'); + expect(mf.download.new).to.match( + /repository\/files\/keep\.pkg\/raw\?ref=sha$/, + ); + expect(mf.download.old).to.match( + /repository\/files\/oldKeep\.pkg\/raw\?ref=parentSha$/, + ); + }); + + it('throws if commitData.files is missing or not an array', async () => { + (gl.getCommitDetails as SinonStub).restore(); + const stub = sinon + .stub(gl, 'getCommitDetails') + // eslint-disable-next-line @typescript-eslint/no-explicit-any + .resolves({ sha: 'sha', parents: [{ sha: 'parentSha' }] } as any); + try { + await gl.handleCommit(fakeCommitInfo, 'token'); + throw new Error('Promise did not reject'); + } catch (err) { + expect((err as Error).message).to.match(/Unable to retrieve modified files/); + } finally { + stub.restore(); + } + }); + }); + }); + + describe('handlePullRequest()', () => { + context('GitHub Adapter', () => { + const fakePullInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' }; + const fakeApiResponse = { + info: { base: { sha: 'baseSha' }, head: { sha: 'headSha' } }, + files: [ + { + additions: 2, + deletions: 1, + filename: 'keep.pkg', + previous_filename: 'oldKeep.pkg', + sha: 'headSha', + status: 'renamed', + blob_url: '', + raw_url: '', + content_url: '', + }, + { + additions: 1, + deletions: 0, + filename: 'skip.txt', + previous_filename: 'skip.txt', + sha: 'headSha', + status: 'modified', + blob_url: '', + raw_url: '', + content_url: '', + }, + ], + }; + + beforeEach(() => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sinon.stub(gh, 'getPullDetails').resolves(fakeApiResponse as any); + }); + afterEach(() => sinon.restore()); + + it('filters and maps pull request files correctly', async () => { + const result: 
ModifiedFile[] = await gh.handlePullRequest( + fakePullInfo, + 'token', + ); + expect(result).to.have.length(1); + const mf = result[0]; + expect(mf.filename).to.equal('keep.pkg'); + expect(mf.filenameOld).to.equal('oldKeep.pkg'); + expect(mf.additions).to.equal(2); + expect(mf.deletions).to.equal(1); + expect(mf.new).to.equal(false); + expect(mf.deleted).to.equal(false); + expect(mf.renamed).to.equal(true); + expect(mf.shaOld).to.equal('baseSha'); + expect(mf.shaNew).to.equal('headSha'); + expect(mf.download.old).to.match(/contents\/keep\.pkg\?ref=baseSha$/); + expect(mf.download.new).to.match(/contents\/keep\.pkg\?ref=headSha$/); + }); + }); + + context('GitLab Adapter', () => { + const fakePullInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' }; + const fakeApiResponse = { + info: { base: { sha: 'baseSha' }, head: { sha: 'headSha' } }, + files: [ + { + filename: 'keep.pkg', + filenameOld: 'oldKeep.pkg', + additions: 2, + deletions: 1, + new: false, + renamed: true, + deleted: false, + }, + ], + }; + + beforeEach(() => { + sinon.stub(gl, 'getPullDetails').resolves(fakeApiResponse); + }); + afterEach(() => sinon.restore()); + + it('maps merge request files correctly', async () => { + const result: ModifiedFile[] = await gl.handlePullRequest( + fakePullInfo, + 'token', + ); + expect(result).to.have.length(1); + const mf = result[0]; + expect(mf.filename).to.equal('keep.pkg'); + expect(mf.filenameOld).to.equal('oldKeep.pkg'); + expect(mf.additions).to.equal(2); + expect(mf.deletions).to.equal(1); + expect(mf.new).to.equal(false); + expect(mf.renamed).to.equal(true); + expect(mf.deleted).to.equal(false); + expect(mf.shaOld).to.equal('baseSha'); + expect(mf.shaNew).to.equal('headSha'); + expect(mf.download.old).to.match(/raw\?ref=baseSha$/); + expect(mf.download.new).to.match(/raw\?ref=headSha$/); + }); + }); + }); +}); diff --git a/test/scm.methods.test.ts b/test/scm.methods.test.ts new file mode 100644 index 0000000..5acc8ad --- /dev/null +++ b/test/scm.methods.test.ts @@ -0,0 +1,83 @@ +import { scmAdapters } from '../src/scm.js'; +import { SUPPORTED_FILES } from '../src/types.ts'; +import expect from 'expect.js'; + +const adapterCases = [ + { + name: 'GitHub', + Class: scmAdapters.github, + host: 'github.com', + customHost: 'gh.custom', + expectedApiUrl: 'https://api.github.com', + expectedCustomApiUrl: 'https://gh.custom/api/v3', + tokenPrefix: 'token', + scm: 'github' as const, + }, + { + name: 'GitLab', + Class: scmAdapters.gitlab, + host: 'gitlab.com', + customHost: 'gl.custom', + expectedApiUrl: 'https://gitlab.com/api/v4', + expectedCustomApiUrl: 'https://gl.custom/api/v4', + tokenPrefix: 'Bearer', + scm: 'gitlab' as const, + }, +]; + +describe('Adapter Methods', () => { + describe('getApiUrl()', () => { + adapterCases.forEach( + ({ + name, + Class, + host, + customHost, + expectedApiUrl, + expectedCustomApiUrl, + scm, + }) => { + it(`${name} default host returns correct API URL`, () => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const adapter: any = new Class({ host, scm }); + expect(adapter.getApiUrl()).to.equal(expectedApiUrl); + }); + it(`${name} custom host returns correct API URL`, () => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const adapter: any = new Class({ host: customHost, scm }); + expect(adapter.getApiUrl()).to.equal(expectedCustomApiUrl); + }); + }, + ); + }); + + describe('createHeaders()', () => { + adapterCases.forEach(({ name, Class, host, tokenPrefix, scm }) => { + it(`${name} adds correct Authorization 
header`, () => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const adapter: any = new Class({ host, scm }); + const headers = adapter.createHeaders('abc123'); + expect(headers).to.have.property( + 'Authorization', + `${tokenPrefix} abc123`, + ); + }); + }); + }); + + describe('isSupportedFile()', () => { + adapterCases.forEach(({ name, Class, host, scm }) => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const adapter: any = new Class({ host, scm }); + it(`${name} returns true for supported extensions`, () => { + SUPPORTED_FILES.forEach((ext) => { + expect(adapter.isSupportedFile(`file.${ext}`)).to.equal(true); + }); + }); + it(`${name} returns false for unsupported or missing extensions`, () => { + expect(adapter.isSupportedFile('file.unknownext')).to.equal(false); + expect(adapter.isSupportedFile('file')).to.equal(false); + }); + }); + }); +}); diff --git a/test/scm.url.detection.test.ts b/test/scm.url.detection.test.ts new file mode 100644 index 0000000..b0762af --- /dev/null +++ b/test/scm.url.detection.test.ts @@ -0,0 +1,93 @@ +import expect from 'expect.js'; +import { createScmAdaptersForTests } from './utils.js'; + +const GITHUB_REPO = 'https://github.com/foo/bar'; +const GITLAB_REPO = 'https://gitlab.com/foo/bar'; +const GITHUB_PR_VARIANTS = [ + '', + 'commits', + 'commits/123abc', + 'checks', + 'files', + 'unexisting_subpage', +]; +const GITLAB_MR_VARIANTS = [ + '', + 'commits', + 'commits/123abc', + 'pipelines', + 'diffs', + 'unexisting_subpage', +]; + +const { gh, gl } = createScmAdaptersForTests(); + +describe('URL Detection', () => { + describe('GitHub Adapter', () => { + it('recognizes valid commit URLs', () => { + const url = new URL(`${GITHUB_REPO}/commit/123abc`); + expect(gh.testCommit(url)).to.not.equal(null); + expect(gh.testPullRequest(url)).to.equal(null); + }); + + it('returns null for invalid commit URLs', () => { + [ + `${GITHUB_REPO}/commit`, + `${GITHUB_REPO}/commits`, + `${GITHUB_REPO}/commit/`, + ].forEach((urlStr) => { + const url = new URL(urlStr); + expect(gh.testCommit(url)).to.equal(null); + expect(gh.testPullRequest(url)).to.equal(null); + }); + }); + + it('recognizes valid pull request URLs (including subpages)', () => { + GITHUB_PR_VARIANTS.forEach((suffix) => { + const url = new URL(`${GITHUB_REPO}/pull/1/${suffix}`); + expect(gh.testPullRequest(url)).to.not.equal(null); + expect(gh.testCommit(url)).to.equal(null); + }); + }); + + it('returns null for invalid pull request URLs', () => { + const url = new URL(`${GITHUB_REPO}/pull`); + expect(gh.testCommit(url)).to.equal(null); + expect(gh.testPullRequest(url)).to.equal(null); + }); + }); + + describe('GitLab Adapter', () => { + it('recognizes valid commit URLs', () => { + const url = new URL(`${GITLAB_REPO}/-/commit/123abc`); + expect(gl.testCommit(url)).to.not.equal(null); + expect(gl.testPullRequest(url)).to.equal(null); + }); + + it('returns null for invalid commit URLs', () => { + [ + `${GITLAB_REPO}/-/commit`, + `${GITLAB_REPO}/-/commits`, + `${GITLAB_REPO}/-/commit/`, + ].forEach((urlStr) => { + const url = new URL(urlStr); + expect(gl.testCommit(url)).to.equal(null); + expect(gl.testPullRequest(url)).to.equal(null); + }); + }); + + it('recognizes valid merge request URLs (including subpages)', () => { + GITLAB_MR_VARIANTS.forEach((suffix) => { + const url = new URL(`${GITLAB_REPO}/-/merge_requests/1/${suffix}`); + expect(gl.testPullRequest(url)).to.not.equal(null); + expect(gl.testCommit(url)).to.equal(null); + }); + }); + + it('returns null for 
invalid merge request URLs', () => { + const url = new URL(`${GITLAB_REPO}/-/merge_requests`); + expect(gl.testCommit(url)).to.equal(null); + expect(gl.testPullRequest(url)).to.equal(null); + }); + }); +}); diff --git a/test/utils.ts b/test/utils.ts new file mode 100644 index 0000000..83a6b3d --- /dev/null +++ b/test/utils.ts @@ -0,0 +1,44 @@ +import { BaseScmAdapter, scmAdapters } from '../src/scm.js'; + +interface GlobalWithFetch extends GlobalThis { + fetch?: (input: RequestInfo | { url: string }) => Promise<Response>; +} + +export const globalWithFetch = globalThis as GlobalWithFetch; + +export type InternalAdapterMethodsGithub = BaseScmAdapter & { + testCommit: typeof scmAdapters.github.prototype.testCommit; + testPullRequest: typeof scmAdapters.github.prototype.testPullRequest; + handlePullRequest: typeof scmAdapters.github.prototype.handlePullRequest; + getPullDetails: typeof scmAdapters.github.prototype.getPullDetails; + handleCommit: typeof scmAdapters.github.prototype.handleCommit; + getCommitDetails: typeof scmAdapters.github.prototype.getCommitDetails; + getApiUrl: typeof scmAdapters.github.prototype.getApiUrl; + createHeaders: typeof scmAdapters.github.prototype.createHeaders; +}; + +export type InternalAdapterMethodsGitlab = BaseScmAdapter & { + testCommit: typeof scmAdapters.gitlab.prototype.testCommit; + testPullRequest: typeof scmAdapters.gitlab.prototype.testPullRequest; + handlePullRequest: typeof scmAdapters.gitlab.prototype.handlePullRequest; + getPullDetails: typeof scmAdapters.gitlab.prototype.getPullDetails; + handleCommit: typeof scmAdapters.gitlab.prototype.handleCommit; + getCommitDetails: typeof scmAdapters.gitlab.prototype.getCommitDetails; + getApiUrl: typeof scmAdapters.gitlab.prototype.getApiUrl; + createHeaders: typeof scmAdapters.gitlab.prototype.createHeaders; + parseStats: typeof scmAdapters.gitlab.prototype.parseStats; + processChanges: typeof scmAdapters.gitlab.prototype.processChanges; +}; + +// Helper that creates SCM adapter instances with their internal methods exposed for tests +export function createScmAdaptersForTests() { + const gh = new scmAdapters.github({ + host: 'github.com', + scm: 'github', + }) as unknown as InternalAdapterMethodsGithub; + const gl = new scmAdapters.gitlab({ + host: 'gitlab.com', + scm: 'gitlab', + }) as unknown as InternalAdapterMethodsGitlab; + return { gh, gl }; +}
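A minimal usage sketch for the new test helper, assuming only what the diff above defines (the commit URL is an arbitrary example):

import expect from 'expect.js';
import { createScmAdaptersForTests } from './utils.js';

// The helper's return type exposes internal adapter methods such as testCommit()
const { gh } = createScmAdaptersForTests();

it('recognizes a commit URL via the typed adapter', () => {
  const info = gh.testCommit(new URL('https://github.com/foo/bar/commit/123abc'));
  expect(info).to.not.equal(null);
});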