From 615be72f2f29b5e4f2bc09cbb2bee168b9744d00 Mon Sep 17 00:00:00 2001 From: Mario Schlicht <196292603+mschlicht-tt@users.noreply.github.com> Date: Tue, 26 Aug 2025 09:56:06 +0200 Subject: [PATCH 1/2] Add many unittests for scm.ts --- src/scm.ts | 8 +- test/scm.details.test.ts | 259 +++++++++++++++++ test/scm.download.test.ts | 423 +++++++++++++++++++++++++++ test/scm.fetch-logic.test.ts | 459 ++++++++++++++++++++++++++++++ test/scm.gitlab-internals.test.ts | 68 +++++ test/scm.mapping.test.ts | 249 ++++++++++++++++ test/scm.methods.test.ts | 83 ++++++ test/scm.url.detection.test.ts | 101 +++++++ 8 files changed, 1646 insertions(+), 4 deletions(-) create mode 100644 test/scm.details.test.ts create mode 100644 test/scm.download.test.ts create mode 100644 test/scm.fetch-logic.test.ts create mode 100644 test/scm.gitlab-internals.test.ts create mode 100644 test/scm.mapping.test.ts create mode 100644 test/scm.methods.test.ts create mode 100644 test/scm.url.detection.test.ts diff --git a/src/scm.ts b/src/scm.ts index 9c630e4..1df6db1 100644 --- a/src/scm.ts +++ b/src/scm.ts @@ -96,7 +96,7 @@ abstract class BaseScmAdapter { parsedUrl = new URL(url); // eslint-disable-next-line @typescript-eslint/no-unused-vars } catch (error) { - throw new Error(`Not avalid URL: ${url}`); + throw new Error(`Not a valid URL: ${url}`); } const commitInfo = this.testCommit(parsedUrl); @@ -347,7 +347,7 @@ class Github extends BaseScmAdapter { }); if (!response.ok) { throw new Error( - `Failed to fetch paginated data (page ${page}): ${response.statusText}`, + `Failed to retrieve paginated data (page ${page}): ${response.statusText}`, ); } itemsOnPage = await response.json(); @@ -432,7 +432,7 @@ class Github extends BaseScmAdapter { if (!response.ok) { throw new Error( - `Failed to retrieve commit details: ${response.statusText}`, + `Failed to retrieve pull details: ${response.statusText}`, ); } const info = await response.json(); @@ -545,7 +545,7 @@ class Gitlab extends BaseScmAdapter { }); if (!response.ok) { throw new Error( - `Failed to fetch paginated data (page ${page}): [${response.status}] ${response.statusText}`, + `Failed to retrieve paginated data (page ${page}): [${response.status}] ${response.statusText}`, ); } if (page === 1) { diff --git a/test/scm.details.test.ts b/test/scm.details.test.ts new file mode 100644 index 0000000..c29634b --- /dev/null +++ b/test/scm.details.test.ts @@ -0,0 +1,259 @@ +import { scmAdapters } from '../src/scm.js'; +import expect from 'expect.js'; + +const gh = new scmAdapters.github({ host: 'github.com', scm: 'github' }); +const gl = new scmAdapters.gitlab({ host: 'gitlab.com', scm: 'gitlab' }); + +function mockGithubPullFetch(fileCount = 120) { + (globalThis as any).fetch = (input: any) => { + const url = typeof input === 'string' ? 
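// The stubbed fetch may be called with a URL string or a Request-like object, so normalize before matching.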
input : input.url; + if ( + url.startsWith('https://api.github.com/repos/foo/bar/pulls/1') && + !url.includes('/files') + ) { + return Promise.resolve( + new Response( + JSON.stringify({ + base: { sha: 'baseSha' }, + head: { sha: 'headSha' }, + }), + { status: 200 }, + ), + ); + } + if (url.includes('/repos/foo/bar/pulls/1/files')) { + const page = Number(new URL(url).searchParams.get('page')) || 1; + if (fileCount <= 100) { + // Only one page + const batch = Array(fileCount).fill({ + filename: `f1.pkg`, + previous_filename: `f1.pkg`, + additions: 1, + deletions: 0, + status: 'modified', + sha: 'h', + blob_url: '', + raw_url: '', + content_url: '', + }); + return Promise.resolve( + new Response(JSON.stringify(batch), { status: 200 }), + ); + } else { + // Pagination: first page 100, second page fileCount-100 + const count = page === 1 ? 100 : fileCount - 100; + const batch = Array(count).fill({ + filename: `f${page}.pkg`, + previous_filename: `f${page}.pkg`, + additions: 1, + deletions: 0, + status: 'modified', + sha: 'h', + blob_url: '', + raw_url: '', + content_url: '', + }); + return Promise.resolve( + new Response(JSON.stringify(batch), { status: 200 }), + ); + } + } + return Promise.reject(new Error('Unexpected fetch: ' + url)); + }; +} + +const GITLAB_FILE_BASE = { + diff: '+a\n-b', + + new_file: false, + renamed_file: false, + deleted_file: false, + generated_file: null, +}; + +function mockGitlabCommitFetch(fileCount = 120) { + (globalThis as any).fetch = (input: any) => { + const url = typeof input === 'string' ? input : input.url; + + if ( + url === + 'https://gitlab.com/api/v4/projects/foo%2Fbar/repository/commits/123abc' + ) { + return Promise.resolve( + new Response(JSON.stringify({ parent_ids: ['p1'] }), { + status: 200, + }), + ); + } + if ( + url.startsWith( + 'https://gitlab.com/api/v4/projects/foo%2Fbar/repository/commits/123abc/diff', + ) + ) { + const page = Number(new URL(url).searchParams.get('page')) || 1; + const gitlabFile = { + ...GITLAB_FILE_BASE, + new_path: `${page}.pkg`, + old_path: `${page}.pkg`, + }; + if (fileCount <= 100) { + const batch = Array(fileCount).fill(gitlabFile); + const headers = new Headers({ 'x-total-pages': '1' }); + return Promise.resolve( + new Response(JSON.stringify(batch), { status: 200, headers }), + ); + } else { + const count = page === 1 ? 100 : fileCount - 100; + const batch = Array(count).fill(gitlabFile); + const headers = new Headers({ 'x-total-pages': '2' }); + return Promise.resolve( + new Response(JSON.stringify(batch), { status: 200, headers }), + ); + } + } + return Promise.reject(new Error('Unexpected fetch: ' + url)); + }; +} + +function mockGitlabPullFetch(fileCount = 120) { + (globalThis as any).fetch = (input: any) => { + const url = typeof input === 'string' ? 
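// Same normalization as above; the GitLab routes below match the URL-encoded project path (foo%2Fbar) case-insensitively.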
input : input.url; + const u = new URL(url); + + if ( + u.pathname.toLocaleLowerCase() === + '/api/v4/projects/foo%2fbar/merge_requests/1' && + !u.searchParams.has('page') + ) { + return Promise.resolve( + new Response( + JSON.stringify({ + diff_refs: { base_sha: 'baseSha', head_sha: 'headSha' }, + }), + { status: 200, headers: new Headers() }, + ), + ); + } + if ( + u.pathname.toLocaleLowerCase() === + '/api/v4/projects/foo%2fbar/merge_requests/1/diffs' && + u.searchParams.has('page') + ) { + const page = Number(u.searchParams.get('page')) || 1; + const gitlabFile = { + ...GITLAB_FILE_BASE, + new_path: `${page}.pkg`, + old_path: `${page}.pkg`, + }; + if (fileCount <= 100) { + const batch = Array(fileCount).fill(gitlabFile); + const headers = new Headers({ 'x-total-pages': '1' }); + return Promise.resolve( + new Response(JSON.stringify(batch), { status: 200, headers }), + ); + } else { + const count = page === 1 ? 100 : fileCount - 100; + const batch = Array(count).fill(gitlabFile); + const headers = new Headers({ 'x-total-pages': '2' }); + return Promise.resolve( + new Response(JSON.stringify(batch), { status: 200, headers }), + ); + } + } + return Promise.reject(new Error('Unexpected fetch: ' + url)); + }; +} + +function clearFetchMock() { + delete (globalThis as any).fetch; +} + +describe('Commit and Pull Details', () => { + describe('GitHub Adapter', () => { + describe('getPullDetails()', () => { + const fakePullInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' }; + + afterEach(clearFetchMock); + + it('collects all files across pull request pages (pagination)', async () => { + mockGithubPullFetch(120); + const prData = await (gh as any).getPullDetails(fakePullInfo, 'token'); + expect(prData.info.base.sha).to.equal('baseSha'); + expect(prData.info.head.sha).to.equal('headSha'); + expect(prData.files).to.have.length(120); + expect(prData.files[0].filename).to.equal('f1.pkg'); + expect(prData.files[119].filename).to.equal('f2.pkg'); + }); + + it('collects all files when only one page is returned (no pagination)', async () => { + mockGithubPullFetch(5); + const prData = await (gh as any).getPullDetails(fakePullInfo, 'token'); + expect(prData.info.base.sha).to.equal('baseSha'); + expect(prData.info.head.sha).to.equal('headSha'); + expect(prData.files).to.have.length(5); + expect(prData.files[0].filename).to.equal('f1.pkg'); + expect(prData.files[4].filename).to.equal('f1.pkg'); + }); + }); + }); + + describe('GitLab Adapter', () => { + describe('getCommitDetails()', () => { + const fakeCommitInfo = { + owner: 'foo', + repo: 'bar', + commitHash: '123abc', + }; + + afterEach(clearFetchMock); + + it('collects all files across commit pages (pagination)', async () => { + mockGitlabCommitFetch(120); + const commitData = await (gl as any).getCommitDetails( + fakeCommitInfo, + 'token', + ); + expect(commitData.sha).to.equal('123abc'); + expect(commitData.parents[0].sha).to.equal('p1'); + expect(commitData.files).to.have.length(120); + }); + + it('collects all files when only one page is returned (no pagination)', async () => { + mockGitlabCommitFetch(5); + const commitData = await (gl as any).getCommitDetails( + fakeCommitInfo, + 'token', + ); + expect(commitData.sha).to.equal('123abc'); + expect(commitData.parents[0].sha).to.equal('p1'); + expect(commitData.files).to.have.length(5); + }); + }); + + describe('getPullDetails()', () => { + const fakePullInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' }; + + afterEach(clearFetchMock); + + it('collects all files across merge request pages 
(pagination)', async () => { + mockGitlabPullFetch(120); + const mrData = await (gl as any).getPullDetails(fakePullInfo, 'token'); + expect(mrData.info.base.sha).to.equal('baseSha'); + expect(mrData.info.head.sha).to.equal('headSha'); + expect(mrData.files).to.have.length(120); + expect(mrData.files[0].filename).to.equal('1.pkg'); + expect(mrData.files[119].filename).to.equal('2.pkg'); + }); + + it('collects all files when only one page is returned (no pagination)', async () => { + mockGitlabPullFetch(5); + const mrData = await (gl as any).getPullDetails(fakePullInfo, 'token'); + expect(mrData.info.base.sha).to.equal('baseSha'); + expect(mrData.info.head.sha).to.equal('headSha'); + expect(mrData.files).to.have.length(5); + expect(mrData.files[0].filename).to.equal('1.pkg'); + expect(mrData.files[4].filename).to.equal('1.pkg'); + }); + }); + }); +}); diff --git a/test/scm.download.test.ts b/test/scm.download.test.ts new file mode 100644 index 0000000..e7dbe60 --- /dev/null +++ b/test/scm.download.test.ts @@ -0,0 +1,423 @@ +import { scmAdapters } from '../src/scm.js'; +import { ModifiedFile } from '../src/types.ts'; +import expect from 'expect.js'; +import sinon, { SinonStub } from 'sinon'; +import browser from 'webextension-polyfill'; + +let adapter: any; +let sandbox: sinon.SinonSandbox; +let downloadListeners: Array<(delta: any) => void>; +let downloadsStub: any; +let tabsStub: any; +let createObjectURLStub: SinonStub; +let revokeObjectURLStub: SinonStub; + +beforeEach(() => { + sandbox = sinon.createSandbox(); + downloadListeners = []; + + adapter = new scmAdapters.github({ host: 'github.com', scm: 'github' }); + + downloadsStub = { + download: sandbox.stub().resolves(123), + search: sandbox.stub().resolves([{ filename: '/tmp/file.ext' }]), + onChanged: { + addListener: (cb: any) => downloadListeners.push(cb), + removeListener: sandbox.stub(), + }, + erase: sandbox.stub().resolves(undefined), + }; + tabsStub = { + update: sandbox.stub().resolves(undefined), + }; + + sandbox.stub(browser, 'downloads').get(() => downloadsStub); + sandbox.stub(browser, 'tabs').get(() => tabsStub); + + (global as any).fetch = sandbox.stub(); + + createObjectURLStub = sandbox + .stub(URL, 'createObjectURL') + .returns('blob://fake'); + revokeObjectURLStub = sandbox + .stub(URL, 'revokeObjectURL') + .callsFake(() => {}); + + (global as any).Buffer = Buffer; +}); + +afterEach(() => { + sandbox.restore(); +}); + +describe('Download helpers', () => { + describe('calcShortHash()', () => { + it('returns first 8 characters', () => { + const res = adapter['calcShortHash']('abcdefghijklmnop'); + expect(res).to.equal('abcdefgh'); + }); + }); + + describe('downloadDummy()', () => { + it('calls doDownload with correct data URL and filename', async () => { + const ddSpy = sandbox + .stub(adapter as any, 'doDownload') + .resolves('/tmp/diff/file.ext'); + const out = await adapter['downloadDummy']('path/to/file.ext', '.X'); + expect( + ddSpy.calledWith( + 'data:text/ext;charset=utf-8,', + 'diff/file/file.X.ext', + ), + ).to.equal(true); + expect(out).to.equal('/tmp/diff/file.ext'); + }); + + it('uses correct mime type', async () => { + const spy = sandbox.stub(adapter as any, 'doDownload'); + await adapter['downloadDummy']('some/path/file.ext', '.X'); + expect( + spy.calledWith('data:text/ext;charset=utf-8,', 'diff/file/file.X.ext'), + ).to.be(true); + }); + }); + + describe('doDownload()', () => { + it('resolves when download completes and removes listener', async () => { + const promise = adapter['doDownload']('url', 
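// doDownload should stay pending until the fake downloads.onChanged listener reports completion below.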
'file.ext'); + await Promise.resolve(); + + const downloadStub = downloadsStub.download as SinonStub; + expect( + downloadStub.calledWith({ + url: 'url', + filename: 'file.ext', + conflictAction: 'overwrite', + }), + ).to.equal(true); + + downloadListeners.forEach((cb) => + cb({ id: 123, state: { current: 'complete' } }), + ); + + const filename = await promise; + expect(filename).to.equal('/tmp/file.ext'); + + expect(downloadsStub.onChanged.removeListener.called).to.equal(true); + expect(downloadsStub.erase.calledWith({ id: 123 })).to.equal(true); + }); + + it('throws if downloadId is undefined', async () => { + downloadsStub.download.resolves(undefined); + try { + await adapter['doDownload']('url', 'file.ext'); + expect().fail('Expected error not thrown'); + } catch (err: any) { + expect(err.message).to.be('Failed to start download'); + } + }); + + it('throws if search returns no items', async () => { + const promise = adapter['doDownload']('url', 'file.ext'); + await Promise.resolve(); + downloadsStub.search.resolves([]); + downloadListeners.forEach((cb) => + cb({ id: 123, state: { current: 'complete' } }), + ); + try { + await promise; + expect().fail('Expected error not thrown'); + } catch (err: any) { + expect(err.message).to.be('Failed to retrieve download item'); + } + }); + }); + + describe('doDownloadFile()', () => { + const apiUrl = 'https://api'; + const filename = 'dir/file.ext'; + const suffix = '.S'; + const token = 'token'; + const sha = 'abc123'; + const base64Content = Buffer.from('content').toString('base64'); + const fakeJson = { content: base64Content }; + + beforeEach(() => { + (global as any).fetch.resolves({ + ok: true, + statusText: 'OK', + json: async () => fakeJson, + blob: async () => new Blob([Uint8Array.from('content')]), + headers: { get: (_: string) => null }, + }); + }); + + it('handles JSON type with ObjectURL support', async () => { + sandbox.stub(adapter as any, 'doDownload').resolves('/out.ext'); + + const out = await adapter['doDownloadFile']( + apiUrl, + 'json', + filename, + suffix, + token, + sha, + ); + + expect( + (global as any).fetch.calledWith(apiUrl, { + headers: adapter.createHeaders(token), + }), + ).to.be(true); + expect(createObjectURLStub.called).to.be(true); + expect(revokeObjectURLStub.called).to.be(true); + expect(out).to.be('/out.ext'); + }); + + it('throws a non-ok response', async () => { + (global as any).fetch.resolves({ ok: false, statusText: '404' }); + try { + await adapter['doDownloadFile']( + apiUrl, + 'json', + filename, + suffix, + token, + sha, + ); + expect().fail('Expected error not thrown'); + } catch (err: any) { + expect(err.message).to.match(/Failed to fetch file dir\/file.ext/); + } + }); + + it('handles RAW type with ObjectURL support', async () => { + const blobUrl = 'blob:fake-object-url'; + createObjectURLStub.returns(blobUrl); + const contentBytes = Uint8Array.from(Buffer.from('content')); + (global as any).fetch.resolves({ + ok: true, + statusText: 'OK', + blob: async () => new Blob([contentBytes]), + headers: { get: (_: string) => null }, + }); + const ddStub = sandbox + .stub(adapter as any, 'doDownload') + .resolves('/withObjectURL.ext'); + const out = await adapter['doDownloadFile']( + apiUrl, + 'raw', + 'dir/file.ext', + '.S', + token, + sha, + ); + const expectedName = 'diff/file/file.S.ext'; + expect(ddStub.calledWith(blobUrl, expectedName)).to.be(true); + expect(out).to.be('/withObjectURL.ext'); + }); + + it('handles RAW type fallback (no ObjectURL) by building correct data URI', async () => { + 
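// Drop the createObjectURL stub and the function itself to simulate an environment without Object URL support.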
(URL.createObjectURL as SinonStub).restore(); + (URL.createObjectURL as any) = undefined; + const contentBytes = Uint8Array.from(Buffer.from('content')); + (global as any).fetch.resolves({ + ok: true, + statusText: 'OK', + blob: async () => new Blob([contentBytes]), + headers: { get: (_: string) => null }, + }); + const ddStub = sandbox + .stub(adapter as any, 'doDownload') + .resolves('/fallback.ext'); + const out = await adapter['doDownloadFile']( + apiUrl, + 'raw', + 'dir/file.ext', + '.S', + token, + sha, + ); + // "content" -> Base64 "Y29udGVudA==" -> URI‑encoded "Y29udGVudA%3D%3D" + const expectedDataUrl = 'data:text/ext;base64,Y29udGVudA%3D%3D'; + const expectedName = 'diff/file/file.S.ext'; + expect(ddStub.calledWith(expectedDataUrl, expectedName)).to.be(true); + expect(out).to.be('/fallback.ext'); + }); + + it('throws on unknown type', async () => { + try { + await adapter['doDownloadFile']( + apiUrl, + 'xml', + filename, + suffix, + token, + sha, + ); + expect().fail('Expected error not thrown'); + } catch (err: any) { + expect(err.message).to.be('Unknown download type: xml'); + } + }); + + it('builds data URI when URL.createObjectURL is unavailable', async () => { + (URL.createObjectURL as SinonStub).restore(); + (URL.createObjectURL as any) = undefined; + + const payload = Buffer.from('hello').toString('base64'); + (global as any).fetch.resolves({ + ok: true, + statusText: 'OK', + json: async () => ({ content: payload }), + headers: { get: (_: string) => null }, + }); + + const dd = sandbox + .stub(adapter as any, 'doDownload') + .resolves('/jf.json'); + const out = await adapter['doDownloadFile']( + 'u', + 'json', + 'dir/foo.txt', + '.S', + 'T', + 'sha', + ); + + const expected = `data:text/plain;base64,${payload}`; + expect(dd.calledWith(expected, 'diff/foo/foo.S.txt')).to.be(true); + expect(out).to.be('/jf.json'); + }); + }); + + describe('downloadDiff()', () => { + it('combines doDownloadFile and downloadDummy and updates the tab', async () => { + const file: ModifiedFile = { + filename: 'file.ext', + filenameOld: 'file.ext', + new: false, + renamed: false, + deleted: true, + additions: 0, + deletions: 0, + shaOld: 'shaOld', + shaNew: 'shaNew', + download: { type: 'raw', old: 'url/old', new: 'url/new' }, + }; + + const ddFileStub = sandbox + .stub(adapter as any, 'doDownloadFile') + .resolves('/old.ext'); + const dummyStub = sandbox + .stub(adapter as any, 'downloadDummy') + .resolves('/new.ext'); + + await adapter.downloadDiff(file, 'token'); + + expect( + ddFileStub.calledWith( + 'url/old', + 'raw', + 'file.ext', + '.shaOld.old', + 'token', + 'shaOld', + ), + ).to.be(true); + expect(dummyStub.calledWith('file.ext', '.shaNew.new')).to.be(true); + + const expectedUrl = encodeURI( + 'tracetronic://diff?file1=/old.ext&file2=/new.ext&cleanup=True', + ); + expect(tabsStub.update.calledWith({ url: expectedUrl })).to.be(true); + }); + + it('uses downloadDummy for old file when new=true and doDownloadFile for new file when not deleted', async () => { + const file: ModifiedFile = { + filename: 'file.ext', + filenameOld: 'file.ext', + new: true, + renamed: false, + deleted: false, + additions: 0, + deletions: 0, + shaOld: 'shaOld', + shaNew: 'shaNew', + download: { type: 'raw', old: 'url/old', new: 'url/new' }, + }; + + const dummyStub = sandbox + .stub(adapter as any, 'downloadDummy') + .resolves('/old.ext'); + const ddFileStub = sandbox + .stub(adapter as any, 'doDownloadFile') + .resolves('/new.ext'); + + await adapter.downloadDiff(file, 'token'); + 
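// For a newly added file there is no old content, so the old side should come from downloadDummy rather than a real fetch.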
expect(dummyStub.calledWith('file.ext', '.shaOld.old')).to.be(true); + + expect( + ddFileStub.calledWith( + 'url/new', + 'raw', + 'file.ext', + '.shaNew.new', + 'token', + 'shaNew', + ), + ).to.be(true); + const expectedUrl = encodeURI( + 'tracetronic://diff?file1=/old.ext&file2=/new.ext&cleanup=True', + ); + expect(tabsStub.update.calledWith({ url: expectedUrl })).to.be(true); + }); + }); + + describe('downloadFile()', () => { + it('calls doDownloadFile then updates the tab', async () => { + const file: ModifiedFile = { + filename: 'file.ext', + filenameOld: 'file.ext', + new: true, + renamed: false, + deleted: false, + additions: 0, + deletions: 0, + shaOld: 'shaOld', + shaNew: 'shaNew', + download: { type: 'json', old: 'url/old', new: 'url/new' }, + }; + + const ddFileStub = sandbox + .stub(adapter as any, 'doDownloadFile') + .resolves('/file.ext'); + await adapter.downloadFile(file, 'new', 'token'); + expect( + ddFileStub.calledWith( + 'url/new', + 'json', + 'file.ext', + '.shaNew.new', + 'token', + 'shaNew', + ), + ).to.be(true); + + await adapter.downloadFile(file, 'old', 'token'); + expect( + ddFileStub.calledWith( + 'url/old', + 'json', + 'file.ext', + '.shaOld.old', + 'token', + 'shaOld', + ), + ).to.be(true); + + const tabUrl = encodeURI('tracetronic:///' + '/file.ext'); + expect(tabsStub.update.calledWith({ url: tabUrl })).to.be(true); + }); + }); +}); diff --git a/test/scm.fetch-logic.test.ts b/test/scm.fetch-logic.test.ts new file mode 100644 index 0000000..90ad663 --- /dev/null +++ b/test/scm.fetch-logic.test.ts @@ -0,0 +1,459 @@ +import { scmAdapters } from '../src/scm.js'; +import { ModifiedFile } from '../src/types.ts'; +import expect from 'expect.js'; +import sinon from 'sinon'; + +const gh = new scmAdapters.github({ host: 'github.com', scm: 'github' }); +const gl = new scmAdapters.gitlab({ host: 'gitlab.com', scm: 'gitlab' }); + +describe('Fetch Logic', () => { + describe('getCommitDetails()', () => { + context('GitHub', () => { + const fakeInfo = { owner: 'foo', repo: 'bar', commitHash: '123abc' }; + const token = 'tok'; + + beforeEach(() => { + sinon.stub(gh as any, 'getApiUrl').returns('https://api'); + sinon + .stub(gh as any, 'createHeaders') + .withArgs(token) + .returns({ Authorization: 'token tok' }); + }); + afterEach(() => sinon.restore()); + + it('returns parsed JSON when response.ok=true', async () => { + (globalThis as any).fetch = sinon.stub().resolves({ + ok: true, + statusText: 'OK', + json: async () => ({ sha: 'sha123', files: [] }), + }); + + const result = await (gh as any).getCommitDetails(fakeInfo, token); + expect(result).to.eql({ sha: 'sha123', files: [] }); + }); + + it('throws if response.ok=false', async () => { + (globalThis as any).fetch = sinon.stub().resolves({ + ok: false, + statusText: 'Not Found', + }); + + try { + await (gh as any).getCommitDetails(fakeInfo, token); + expect().fail('Expected error'); + } catch (err: any) { + expect(err.message).to.match( + /Failed to retrieve commit details: Not Found/, + ); + } + }); + }); + + context('GitLab diff error', () => { + const fakeInfo = { owner: 'foo', repo: 'bar', commitHash: 'abc123' }; + const token = 'tok'; + + beforeEach(() => { + sinon.stub(gl as any, 'getApiUrl').returns('https://gitlab.com/api/v4'); + sinon + .stub(gl as any, 'createHeaders') + .withArgs(token) + .returns({ Authorization: 'Bearer tok' }); + }); + afterEach(() => sinon.restore()); + + it('throws if diff page 1 fetch fails', async () => { + (globalThis as any).fetch = sinon + .stub() + .onFirstCall() + .resolves({ ok: 
true, json: async () => ({ parent_ids: ['p1'] }) }) + .onSecondCall() + .resolves({ ok: false, status: 500, statusText: 'Internal Error' }); + + try { + await (gl as any).getCommitDetails(fakeInfo, token); + expect().fail('Expected error'); + } catch (err: any) { + expect(err.message).to.match(/\[500\] Internal Error/); + } + }); + }); + + context('GitLab nested project group', () => { + const token = 'token'; + const fakeInfo = { + owner: 'group/subgroup', + repo: 'project', + commitHash: 'abc123', + }; + + beforeEach(() => { + sinon.stub(gl as any, 'getApiUrl').returns('https://gitlab.com/api/v4'); + sinon + .stub(gl as any, 'createHeaders') + .withArgs(token) + .returns({ Authorization: 'Bearer token' }); + + sinon.replace( + globalThis, + 'fetch', + sinon + .stub() + .onFirstCall() + .resolves( + new Response(JSON.stringify({ parent_ids: ['p1'] }), { + status: 200, + headers: new Headers({ 'Content-Type': 'application/json' }), + }), + ) + .onSecondCall() + .resolves( + new Response( + JSON.stringify([ + { + diff: '+x\n-y', + new_path: 'file.pkg', + old_path: 'file.pkg', + new_file: false, + renamed_file: false, + deleted_file: false, + }, + ]), + { status: 200, headers: new Headers({ 'x-total-pages': '1' }) }, + ), + ), + ); + }); + + afterEach(() => sinon.restore()); + + it('handles commit from nested group project', async () => { + const result = await (gl as any).getCommitDetails(fakeInfo, token); + expect(result).to.have.property('sha', 'abc123'); + expect(result.files).to.be.an('array'); + expect(result.files[0].filename).to.equal('file.pkg'); + }); + }); + }); + + describe('getPullDetails()', () => { + context('GitHub error paths', () => { + const token = 'token'; + const pullInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' }; + + beforeEach(() => { + sinon.stub(gh as any, 'getApiUrl').returns('https://api'); + sinon + .stub(gh as any, 'createHeaders') + .withArgs(token) + .returns({ Authorization: 'token token' }); + }); + afterEach(() => sinon.restore()); + + it('throws when first page of pull-files is non-ok', async () => { + (globalThis as any).fetch = sinon + .stub() + .onFirstCall() + .resolves({ ok: true, json: async () => {} }) + .onSecondCall() + .resolves({ ok: false, statusText: 'Bad Gateway' }); + + try { + await (gh as any).getPullDetails(pullInfo, token); + expect().fail('Expected error'); + } catch (err: any) { + expect(err.message).to.match( + /Failed to retrieve paginated data \(page 1\): Bad Gateway/, + ); + } + }); + + it('throws if initial pull metadata fetch fails', async () => { + (globalThis as any).fetch = sinon.stub().resolves({ + ok: false, + statusText: 'Not Found', + }); + + try { + await (gh as any).getPullDetails(pullInfo, token); + expect().fail('Expected error not thrown'); + } catch (err: any) { + expect(err.message).to.match( + /Failed to retrieve pull details: Not Found/, + ); + } + }); + }); + + context('GitLab MR diff error', () => { + const fakeInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' }; + const token = 'tok'; + + beforeEach(() => { + sinon.stub(gl as any, 'getApiUrl').returns('https://gitlab.com/api/v4'); + sinon + .stub(gl as any, 'createHeaders') + .withArgs(token) + .returns({ Authorization: 'Bearer tok' }); + }); + afterEach(() => sinon.restore()); + + it('throws if MR diff page 2 fetch fails', async () => { + (globalThis as any).fetch = (input: any) => { + const urlStr = typeof input === 'string' ? 
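// Serve diff page 1 successfully but fail page 2 with a 502 to hit the pagination error path.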
input : input.url; + const u = new URL(urlStr); + + if ( + u.pathname.endsWith('/diffs') && + u.searchParams.get('page') === '1' + ) { + return Promise.resolve( + new Response( + JSON.stringify([ + { + diff: '+a\n-b', + new_path: 'f.pkg', + old_path: 'f.pkg', + new_file: false, + renamed_file: false, + deleted_file: false, + generated_file: null, + }, + ]), + { status: 200, headers: new Headers({ 'x-total-pages': '2' }) }, + ), + ); + } + + if ( + u.pathname.endsWith('/diffs') && + u.searchParams.get('page') === '2' + ) { + return Promise.resolve( + new Response(null, { status: 502, statusText: 'Bad Gateway' }), + ); + } + + return Promise.reject(new Error('Unexpected fetch ' + urlStr)); + }; + + try { + await (gl as any).getPullDetails(fakeInfo, token); + expect().fail('Expected error'); + } catch (err: any) { + expect(err.message).to.match( + /Failed to retrieve paginated data \(page 2\): \[502\] Bad Gateway/, + ); + } + }); + + it('throws if final merge request metadata fetch fails', async () => { + (globalThis as any).fetch = (input: any) => { + const url = typeof input === 'string' ? input : input.url; + const u = new URL(url); + + if (u.pathname.endsWith('/diffs')) { + if (u.searchParams.get('page') === '1') { + return Promise.resolve( + new Response( + JSON.stringify([ + { + diff: '+a\n-b', + new_path: 'f.pkg', + old_path: 'f.pkg', + new_file: false, + renamed_file: false, + deleted_file: false, + generated_file: null, + }, + ]), + { + status: 200, + headers: new Headers({ 'x-total-pages': '2' }), + }, + ), + ); + } + return Promise.resolve( + new Response(JSON.stringify([]), { status: 200 }), + ); + } + + return Promise.resolve({ + ok: false, + status: 404, + statusText: 'Not Found', + }); + }; + + try { + await (gl as any).getPullDetails(fakeInfo, token); + expect().fail('Expected error'); + } catch (err: any) { + expect(err.message).to.match( + /Failed to retrieve merge request details: \[404\] Not Found/, + ); + } + }); + }); + }); + + describe('fetchModifiedFiles()', () => { + const cases = [ + { + name: 'GitHub', + Adapter: scmAdapters.github, + commitUrl: 'https://github.com/foo/bar/commit/123abc', + prUrl: 'https://github.com/foo/bar/pull/1', + hostInfo: { host: 'github.com', scm: 'github' as const }, + }, + { + name: 'GitLab', + Adapter: scmAdapters.gitlab, + commitUrl: 'https://gitlab.com/foo/bar/-/commit/123abc', + prUrl: 'https://gitlab.com/foo/bar/-/merge_requests/1', + hostInfo: { host: 'gitlab.com', scm: 'gitlab' as const }, + }, + ]; + + cases.forEach(({ name, Adapter, hostInfo, commitUrl, prUrl }) => { + describe(`${name} Adapter`, () => { + let adapter: any; + let fakeFiles: ModifiedFile[]; + let stubCommit: sinon.SinonStub; + let stubPull: sinon.SinonStub; + + beforeEach(() => { + adapter = new Adapter(hostInfo); + fakeFiles = [ + { + filename: 'x.ts', + filenameOld: 'x.ts', + new: false, + renamed: false, + deleted: false, + additions: 1, + deletions: 0, + shaOld: 'o', + shaNew: 'n', + download: { type: 'json', old: 'o', new: 'n' }, + }, + ]; + stubCommit = sinon.stub(adapter, 'handleCommit').resolves(fakeFiles); + stubPull = sinon + .stub(adapter, 'handlePullRequest') + .resolves(fakeFiles); + }); + + afterEach(() => sinon.restore()); + + it('calls handleCommit for commit URLs', async () => { + const result = await adapter.fetchModifiedFiles(commitUrl, 'token'); + expect(stubCommit.calledOnce).to.equal(true); + expect(stubPull.notCalled).to.equal(true); + expect(result).to.eql(fakeFiles); + }); + + it('calls handlePullRequest for pull/merge request URLs', async 
() => { + const result = await adapter.fetchModifiedFiles(prUrl, 'token'); + expect(stubPull.calledOnce).to.equal(true); + expect(stubCommit.notCalled).to.equal(true); + expect(result).to.eql(fakeFiles); + }); + + it('throws on malformed URL', async () => { + try { + await adapter.fetchModifiedFiles('not-a-url', 'token'); + throw new Error('Promise did not reject'); + } catch (err: any) { + expect(err.message).to.match(/Not a valid URL: not-a-url/); + } + }); + + it('throws if URL is neither commit nor pull/merge request', async () => { + try { + const badUrl = + name === 'GitHub' + ? 'https://github.com/foo/bar/issues/1' + : 'https://gitlab.com/foo/bar/-/issues/1'; + await adapter.fetchModifiedFiles(badUrl, 'token'); + throw new Error('Promise did not reject'); + } catch (err: any) { + expect(err.message).to.match( + /Not a GitHub commit or pull request page/, + ); + } + }); + }); + }); + }); + + describe('test()', () => { + describe('GitHub Adapter', () => { + it('returns true when fetch.ok is true', async () => { + (globalThis as any).fetch = () => Promise.resolve({ ok: true } as any); + const result = await gh.test('token'); + expect(result).to.equal(true); + }); + + it('returns false then fetch.ok is false', async () => { + (globalThis as any).fetch = () => Promise.resolve({ ok: false } as any); + const result = await gh.test('token'); + expect(result).to.equal(false); + }); + + it('returns false on network error', async () => { + const errStub = sinon.stub(console, 'error'); + (globalThis as any).fetch = () => Promise.reject(new Error()); + const result = await gh.test('token'); + expect(result).to.equal(false); + errStub.restore(); + }); + }); + + describe('GitLab Adapter', () => { + it('returns true on 200 OK', async () => { + (globalThis as any).fetch = () => + Promise.resolve({ + ok: true, + status: 200, + headers: { get: (_: string) => null }, + } as any); + const result = await gl.test('token'); + expect(result).to.equal(true); + }); + + it('returns true on 403 with x-gitlab-meta header', async () => { + (globalThis as any).fetch = () => + Promise.resolve({ + ok: false, + status: 403, + headers: { + get: (name: string) => (name === 'x-gitlab-meta' ? 
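// A 403 that still carries x-gitlab-meta is accepted as a reachable GitLab instance; presumably this covers tokens with insufficient scope rather than a wrong host.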
'yes' : null), + }, + } as any); + const result = await gl.test('token'); + expect(result).to.equal(true); + }); + + it('returns false on 403 without x-gitlab-meta header', async () => { + (globalThis as any).fetch = () => + Promise.resolve({ + ok: false, + status: 403, + headers: { get: () => null }, + } as any); + const result = await gl.test('token'); + expect(result).to.equal(false); + }); + + it('returns false on network error', async () => { + const errStub = sinon.stub(console, 'error'); + (globalThis as any).fetch = () => Promise.reject(new Error()); + const result = await gl.test('token'); + expect(result).to.equal(false); + errStub.restore(); + }); + }); + }); +}); diff --git a/test/scm.gitlab-internals.test.ts b/test/scm.gitlab-internals.test.ts new file mode 100644 index 0000000..05de1d8 --- /dev/null +++ b/test/scm.gitlab-internals.test.ts @@ -0,0 +1,68 @@ +import { scmAdapters } from '../src/scm.js'; +import expect from 'expect.js'; +import sinon from 'sinon'; + +const gl = new scmAdapters.gitlab({ host: 'gitlab.com', scm: 'gitlab' }); + +describe('GitLab Adapter internals', () => { + it('parseStats() counts additions and deletions correctly', () => { + const stats = (gl as any).parseStats('\n+a\n-b\n+c\n-d\n'); + expect(stats).to.eql({ additions: 2, deletions: 2 }); + }); + + it('parseStats() returns zeroes on empty diff', () => { + const stats = (gl as any).parseStats(''); + expect(stats).to.eql({ additions: 0, deletions: 0 }); + }); + + it('processChanges() filters out unsupported filetypes', () => { + const raw = [ + { + diff: '+x\n-y', + new_path: 'keep.pkg', + old_path: 'keep.pkg', + new_file: false, + renamed_file: false, + deleted_file: false, + generated_file: null, + }, + { + diff: '+x\n-y', + new_path: 'skip.txt', + old_path: 'skip.txt', + new_file: false, + renamed_file: false, + deleted_file: false, + generated_file: null, + }, + ]; + const out = (gl as any).processChanges(raw); + expect(out).to.have.length(1); + expect(out[0].filename).to.equal('keep.pkg'); + }); + + it('getCommitDetails throws on initial metadata fetch failure', async () => { + const fakeInfo = { owner: 'foo', repo: 'bar', commitHash: '123abc' }; + const token = 'tok'; + sinon.stub(gl as any, 'getApiUrl').returns('https://gitlab.com/api/v4'); + sinon + .stub(gl as any, 'createHeaders') + .withArgs(token) + .returns({ Authorization: 'Bearer tok' }); + + (globalThis as any).fetch = sinon.stub().resolves({ + ok: false, + status: 401, + statusText: 'Unauthorized', + }); + + try { + await (gl as any).getCommitDetails(fakeInfo, token); + expect().fail('Expected error'); + } catch (err: any) { + expect(err.message).to.match(/\[401\] Unauthorized/); + } finally { + sinon.restore(); + } + }); +}); diff --git a/test/scm.mapping.test.ts b/test/scm.mapping.test.ts new file mode 100644 index 0000000..de067e9 --- /dev/null +++ b/test/scm.mapping.test.ts @@ -0,0 +1,249 @@ +import { scmAdapters } from '../src/scm.js'; +import { ModifiedFile } from '../src/types.ts'; +import expect from 'expect.js'; +import sinon, { SinonStub } from 'sinon'; + +const gh = new scmAdapters.github({ host: 'github.com', scm: 'github' }); +const gl = new scmAdapters.gitlab({ host: 'gitlab.com', scm: 'gitlab' }); + +describe('Mapping & Filtering (response files to internal files)', () => { + describe('handleCommit()', () => { + context('GitHub Adapter', () => { + const fakeCommitInfo = { + owner: 'foo', + repo: 'bar', + commitHash: 'abc123', + }; + const fakeApiResponse = { + sha: 'sha', + parents: [{ sha: 'parentSha' }], + files: [ 
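// One supported (.pkg) and one unsupported (.txt) entry: handleCommit is expected to keep only the supported file.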
+ { + filename: 'keep.pkg', + previous_filename: 'oldKeep.pkg', + additions: 2, + deletions: 1, + status: 'renamed', + sha: 'sha', + blob_url: '', + raw_url: '', + content_url: '', + }, + { + filename: 'skip.txt', + previous_filename: 'skip.txt', + additions: 1, + deletions: 0, + status: 'modified', + sha: 'sha', + blob_url: '', + raw_url: '', + content_url: '', + }, + ], + }; + + beforeEach(() => { + sinon.stub(gh as any, 'getCommitDetails').resolves(fakeApiResponse); + }); + afterEach(() => sinon.restore()); + + it('filters and maps commit files correctly', async () => { + const result = await (gh as any).handleCommit(fakeCommitInfo, 'token'); + expect(result).to.have.length(1); + const mf: ModifiedFile = result[0]; + expect(mf.filename).to.equal('keep.pkg'); + expect(mf.filenameOld).to.equal('oldKeep.pkg'); + expect(mf.additions).to.equal(2); + expect(mf.deletions).to.equal(1); + expect(mf.renamed).to.equal(true); + expect(mf.shaOld).to.equal('parentSha'); + expect(mf.shaNew).to.equal('sha'); + expect(mf.download.new).to.match(/contents\/keep\.pkg\?ref=sha$/); + expect(mf.download.old).to.match( + /contents\/oldKeep\.pkg\?ref=parentSha$/, + ); + }); + + it('throws if commitData.files is missing or not an array', async () => { + ((gh as any).getCommitDetails as SinonStub).restore(); + const stub = sinon + .stub(gh as any, 'getCommitDetails') + .resolves({ sha: 'sha', parents: [{ sha: 'parentSha' }] } as any); + try { + await (gh as any).handleCommit(fakeCommitInfo, 'token'); + throw new Error('Promise did not reject'); + } catch (err) { + expect(err.message).to.match(/Unable to retrieve modified files/); + } finally { + stub.restore(); + } + }); + }); + + context('GitLab Adapter', () => { + const fakeCommitInfo = { + owner: 'foo', + repo: 'bar', + commitHash: 'abc123', + }; + const fakeApiResponse = { + sha: 'sha', + parents: [{ sha: 'parentSha' }], + files: [ + { + filename: 'keep.pkg', + filenameOld: 'oldKeep.pkg', + new: false, + renamed: true, + deleted: false, + additions: 2, + deletions: 1, + }, + ], + }; + + beforeEach(() => { + sinon.stub(gl as any, 'getCommitDetails').resolves(fakeApiResponse); + }); + afterEach(() => sinon.restore()); + + it('processes stats correctly', async () => { + const result = await (gl as any).handleCommit(fakeCommitInfo, 'token'); + expect(result).to.have.length(1); + const mf: ModifiedFile = result[0]; + expect(mf.filename).to.equal('keep.pkg'); + expect(mf.filenameOld).to.equal('oldKeep.pkg'); + expect(mf.additions).to.equal(2); + expect(mf.deletions).to.equal(1); + expect(mf.new).to.equal(false); + expect(mf.deleted).to.equal(false); + expect(mf.renamed).to.equal(true); + expect(mf.shaOld).to.equal('parentSha'); + expect(mf.shaNew).to.equal('sha'); + expect(mf.download.new).to.match( + /repository\/files\/keep\.pkg\/raw\?ref=sha$/, + ); + expect(mf.download.old).to.match( + /repository\/files\/oldKeep\.pkg\/raw\?ref=parentSha$/, + ); + }); + + it('throws if commitData.files is missing or not an array', async () => { + ((gl as any).getCommitDetails as SinonStub).restore(); + const stub = sinon + .stub(gl as any, 'getCommitDetails') + .resolves({ sha: 'sha', parents: [{ sha: 'parentSha' }] } as any); + try { + await (gl as any).handleCommit(fakeCommitInfo, 'token'); + throw new Error('Promise did not reject'); + } catch (err) { + expect(err.message).to.match(/Unable to retrieve modified files/); + } finally { + stub.restore(); + } + }); + }); + }); + + describe('handlePullRequest()', () => { + context('GitHub Adapter', () => { + const fakePullInfo = { 
owner: 'foo', repo: 'bar', pullNumber: '1' }; + const fakeApiResponse = { + info: { base: { sha: 'baseSha' }, head: { sha: 'headSha' } }, + files: [ + { + additions: 2, + deletions: 1, + filename: 'keep.pkg', + previous_filename: 'oldKeep.pkg', + sha: 'headSha', + status: 'renamed', + blob_url: '', + raw_url: '', + content_url: '', + }, + { + additions: 1, + deletions: 0, + filename: 'skip.txt', + previous_filename: 'skip.txt', + sha: 'headSha', + status: 'modified', + blob_url: '', + raw_url: '', + content_url: '', + }, + ], + }; + + beforeEach(() => { + sinon.stub(gh as any, 'getPullDetails').resolves(fakeApiResponse); + }); + afterEach(() => sinon.restore()); + + it('filters and maps pull request files correctly', async () => { + const result: ModifiedFile[] = await (gh as any).handlePullRequest( + fakePullInfo, + 'token', + ); + expect(result).to.have.length(1); + const mf = result[0]; + expect(mf.filename).to.equal('keep.pkg'); + expect(mf.filenameOld).to.equal('oldKeep.pkg'); + expect(mf.additions).to.equal(2); + expect(mf.deletions).to.equal(1); + expect(mf.new).to.equal(false); + expect(mf.deleted).to.equal(false); + expect(mf.renamed).to.equal(true); + expect(mf.shaOld).to.equal('baseSha'); + expect(mf.shaNew).to.equal('headSha'); + expect(mf.download.old).to.match(/contents\/keep\.pkg\?ref=baseSha$/); + expect(mf.download.new).to.match(/contents\/keep\.pkg\?ref=headSha$/); + }); + }); + + context('GitLab Adapter', () => { + const fakePullInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' }; + const fakeApiResponse = { + info: { base: { sha: 'baseSha' }, head: { sha: 'headSha' } }, + files: [ + { + filename: 'keep.pkg', + filenameOld: 'oldKeep.pkg', + additions: 2, + deletions: 1, + new: false, + renamed: true, + deleted: false, + }, + ], + }; + + beforeEach(() => { + sinon.stub(gl as any, 'getPullDetails').resolves(fakeApiResponse); + }); + afterEach(() => sinon.restore()); + + it('maps merge request files correctly', async () => { + const result: ModifiedFile[] = await (gl as any).handlePullRequest( + fakePullInfo, + 'token', + ); + expect(result).to.have.length(1); + const mf = result[0]; + expect(mf.filename).to.equal('keep.pkg'); + expect(mf.filenameOld).to.equal('oldKeep.pkg'); + expect(mf.additions).to.equal(2); + expect(mf.deletions).to.equal(1); + expect(mf.new).to.equal(false); + expect(mf.renamed).to.equal(true); + expect(mf.deleted).to.equal(false); + expect(mf.shaOld).to.equal('baseSha'); + expect(mf.shaNew).to.equal('headSha'); + expect(mf.download.old).to.match(/raw\?ref=baseSha$/); + expect(mf.download.new).to.match(/raw\?ref=headSha$/); + }); + }); + }); +}); diff --git a/test/scm.methods.test.ts b/test/scm.methods.test.ts new file mode 100644 index 0000000..5963968 --- /dev/null +++ b/test/scm.methods.test.ts @@ -0,0 +1,83 @@ +import { scmAdapters } from '../src/scm.js'; +import { SUPPORTED_FILES } from '../src/types.ts'; +import expect from 'expect.js'; + +const adapterCases = [ + { + name: 'GitHub', + Class: scmAdapters.github, + host: 'github.com', + customHost: 'gh.custom', + expectedApiUrl: 'https://api.github.com', + expectedCustomApiUrl: 'https://gh.custom/api/v3', + tokenPrefix: 'token', + scm: 'github' as const, + }, + { + name: 'GitLab', + Class: scmAdapters.gitlab, + host: 'gitlab.com', + customHost: 'gl.custom', + expectedApiUrl: 'https://gitlab.com/api/v4', + expectedCustomApiUrl: 'https://gl.custom/api/v4', + tokenPrefix: 'Bearer', + scm: 'gitlab' as const, + }, +]; + +describe('Adapter Methods', () => { + describe('getApiUrl()', () => { + 
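// Expected mapping: github.com uses the separate api.github.com host, self-hosted GitHub uses <host>/api/v3, and GitLab always uses <host>/api/v4.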
adapterCases.forEach( + ({ + name, + Class, + host, + customHost, + expectedApiUrl, + expectedCustomApiUrl, + scm, + }) => { + it(`${name} default host returns correct API URL`, () => { + const adapter = new Class({ host, scm }); + expect((adapter as any).getApiUrl()).to.equal(expectedApiUrl); + }); + it(`${name} custom host returns correct API URL`, () => { + const adapter = new Class({ host: customHost, scm }); + expect((adapter as any).getApiUrl()).to.equal(expectedCustomApiUrl); + }); + }, + ); + }); + + describe('createHeaders()', () => { + adapterCases.forEach(({ name, Class, host, tokenPrefix, scm }) => { + it(`${name} adds correct Authorization header`, () => { + const adapter = new Class({ host, scm }); + const headers = (adapter as any).createHeaders('abc123'); + expect(headers).to.have.property( + 'Authorization', + `${tokenPrefix} abc123`, + ); + }); + }); + }); + + describe('isSupportedFile()', () => { + adapterCases.forEach(({ name, Class, host, scm }) => { + const adapter = new Class({ host, scm }); + it(`${name} returns true for supported extensions`, () => { + SUPPORTED_FILES.forEach((ext) => { + expect((adapter as any).isSupportedFile(`file.${ext}`)).to.equal( + true, + ); + }); + }); + it(`${name} returns false for unsupported or missing extensions`, () => { + expect((adapter as any).isSupportedFile('file.unknownext')).to.equal( + false, + ); + expect((adapter as any).isSupportedFile('file')).to.equal(false); + }); + }); + }); +}); diff --git a/test/scm.url.detection.test.ts b/test/scm.url.detection.test.ts new file mode 100644 index 0000000..b258ecc --- /dev/null +++ b/test/scm.url.detection.test.ts @@ -0,0 +1,101 @@ +import { scmAdapters } from '../src/scm.js'; +import { HostInfo } from '../src/types.js'; +import expect from 'expect.js'; + +const GITHUB_REPO = 'https://github.com/foo/bar'; +const GITLAB_REPO = 'https://gitlab.com/foo/bar'; +const GITHUB_PR_VARIANTS = [ + '', + 'commits', + 'commits/123abc', + 'checks', + 'files', + 'unexisting_subpage', +]; +const GITLAB_MR_VARIANTS = [ + '', + 'commits', + 'commits/123abc', + 'pipelines', + 'diffs', + 'unexisting_subpage', +]; + +const gh = new scmAdapters.github({ + host: 'github.com', + scm: 'github', +} as HostInfo); +const gl = new scmAdapters.gitlab({ + host: 'gitlab.com', + scm: 'gitlab', +} as HostInfo); + +describe('URL Detection', () => { + describe('GitHub Adapter', () => { + it('recognizes valid commit URLs', () => { + const url = new URL(`${GITHUB_REPO}/commit/123abc`); + expect((gh as any).testCommit(url)).to.not.equal(null); + expect((gh as any).testPullRequest(url)).to.equal(null); + }); + + it('returns null for invalid commit URLs', () => { + [ + `${GITHUB_REPO}/commit`, + `${GITHUB_REPO}/commits`, + `${GITHUB_REPO}/commit/`, + ].forEach((urlStr) => { + const url = new URL(urlStr); + expect((gh as any).testCommit(url)).to.equal(null); + expect((gh as any).testPullRequest(url)).to.equal(null); + }); + }); + + it('recognizes valid pull request URLs (including subpages)', () => { + GITHUB_PR_VARIANTS.forEach((suffix) => { + const url = new URL(`${GITHUB_REPO}/pull/1/${suffix}`); + expect((gh as any).testPullRequest(url)).to.not.equal(null); + expect((gh as any).testCommit(url)).to.equal(null); + }); + }); + + it('returns null for invalid pull request URLs', () => { + const url = new URL(`${GITHUB_REPO}/pull`); + expect((gh as any).testCommit(url)).to.equal(null); + expect((gh as any).testPullRequest(url)).to.equal(null); + }); + }); + + describe('GitLab Adapter', () => { + it('recognizes valid commit 
URLs', () => { + const url = new URL(`${GITLAB_REPO}/-/commit/123abc`); + expect((gl as any).testCommit(url)).to.not.equal(null); + expect((gl as any).testPullRequest(url)).to.equal(null); + }); + + it('returns null for invalid commit URLs', () => { + [ + `${GITLAB_REPO}/-/commit`, + `${GITLAB_REPO}/-/commits`, + `${GITLAB_REPO}/-/commit/`, + ].forEach((urlStr) => { + const url = new URL(urlStr); + expect((gl as any).testCommit(url)).to.equal(null); + expect((gl as any).testPullRequest(url)).to.equal(null); + }); + }); + + it('recognizes valid merge request URLs (including subpages)', () => { + GITLAB_MR_VARIANTS.forEach((suffix) => { + const url = new URL(`${GITLAB_REPO}/-/merge_requests/1/${suffix}`); + expect((gl as any).testPullRequest(url)).to.not.equal(null); + expect((gl as any).testCommit(url)).to.equal(null); + }); + }); + + it('returns null for invalid merge request URLs', () => { + const url = new URL(`${GITLAB_REPO}/-/merge_requests`); + expect((gl as any).testCommit(url)).to.equal(null); + expect((gl as any).testPullRequest(url)).to.equal(null); + }); + }); +}); From f4ed34677503e3a79f25c8005b7f5a7a7b9009f9 Mon Sep 17 00:00:00 2001 From: Mario Schlicht <196292603+mschlicht-tt@users.noreply.github.com> Date: Wed, 27 Aug 2025 10:12:05 +0200 Subject: [PATCH 2/2] Fix typing for unittests to make linting happy --- src/scm.ts | 2 +- test/scm.details.test.ts | 32 +++--- test/scm.download.test.ts | 143 ++++++++++++++++---------- test/scm.fetch-logic.test.ts | 160 +++++++++++++++++------------- test/scm.gitlab-internals.test.ts | 22 ++-- test/scm.mapping.test.ts | 38 +++---- test/scm.methods.test.ts | 28 +++--- test/scm.url.detection.test.ts | 44 ++++---- test/utils.ts | 44 ++++++++ 9 files changed, 302 insertions(+), 211 deletions(-) create mode 100644 test/utils.ts diff --git a/src/scm.ts b/src/scm.ts index 1df6db1..1077205 100644 --- a/src/scm.ts +++ b/src/scm.ts @@ -62,7 +62,7 @@ type GitlabChange = { generated_file: boolean | null; }; -abstract class BaseScmAdapter { +export abstract class BaseScmAdapter { hostInfo: HostInfo; constructor(hostInfo: HostInfo) { this.hostInfo = hostInfo; diff --git a/test/scm.details.test.ts b/test/scm.details.test.ts index c29634b..411dc23 100644 --- a/test/scm.details.test.ts +++ b/test/scm.details.test.ts @@ -1,11 +1,9 @@ -import { scmAdapters } from '../src/scm.js'; import expect from 'expect.js'; - -const gh = new scmAdapters.github({ host: 'github.com', scm: 'github' }); -const gl = new scmAdapters.gitlab({ host: 'gitlab.com', scm: 'gitlab' }); +import { createScmAdaptersForTests, globalWithFetch } from './utils.js'; +const { gh, gl } = createScmAdaptersForTests(); function mockGithubPullFetch(fileCount = 120) { - (globalThis as any).fetch = (input: any) => { + globalWithFetch.fetch = (input: RequestInfo | { url: string }) => { const url = typeof input === 'string' ? input : input.url; if ( url.startsWith('https://api.github.com/repos/foo/bar/pulls/1') && @@ -72,7 +70,7 @@ const GITLAB_FILE_BASE = { }; function mockGitlabCommitFetch(fileCount = 120) { - (globalThis as any).fetch = (input: any) => { + globalWithFetch.fetch = (input: RequestInfo | { url: string }) => { const url = typeof input === 'string' ? input : input.url; if ( @@ -116,7 +114,7 @@ function mockGitlabCommitFetch(fileCount = 120) { } function mockGitlabPullFetch(fileCount = 120) { - (globalThis as any).fetch = (input: any) => { + globalWithFetch.fetch = (input: RequestInfo | { url: string }) => { const url = typeof input === 'string' ? 
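// Patch 2 widens the mock parameter from any to RequestInfo | { url: string }, so the normalization below type-checks and the mock no longer needs any.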
input : input.url; const u = new URL(url); @@ -165,7 +163,7 @@ function mockGitlabPullFetch(fileCount = 120) { } function clearFetchMock() { - delete (globalThis as any).fetch; + delete globalWithFetch.fetch; } describe('Commit and Pull Details', () => { @@ -177,7 +175,7 @@ describe('Commit and Pull Details', () => { it('collects all files across pull request pages (pagination)', async () => { mockGithubPullFetch(120); - const prData = await (gh as any).getPullDetails(fakePullInfo, 'token'); + const prData = await gh.getPullDetails(fakePullInfo, 'token'); expect(prData.info.base.sha).to.equal('baseSha'); expect(prData.info.head.sha).to.equal('headSha'); expect(prData.files).to.have.length(120); @@ -187,7 +185,7 @@ describe('Commit and Pull Details', () => { it('collects all files when only one page is returned (no pagination)', async () => { mockGithubPullFetch(5); - const prData = await (gh as any).getPullDetails(fakePullInfo, 'token'); + const prData = await gh.getPullDetails(fakePullInfo, 'token'); expect(prData.info.base.sha).to.equal('baseSha'); expect(prData.info.head.sha).to.equal('headSha'); expect(prData.files).to.have.length(5); @@ -209,10 +207,7 @@ describe('Commit and Pull Details', () => { it('collects all files across commit pages (pagination)', async () => { mockGitlabCommitFetch(120); - const commitData = await (gl as any).getCommitDetails( - fakeCommitInfo, - 'token', - ); + const commitData = await gl.getCommitDetails(fakeCommitInfo, 'token'); expect(commitData.sha).to.equal('123abc'); expect(commitData.parents[0].sha).to.equal('p1'); expect(commitData.files).to.have.length(120); @@ -220,10 +215,7 @@ describe('Commit and Pull Details', () => { it('collects all files when only one page is returned (no pagination)', async () => { mockGitlabCommitFetch(5); - const commitData = await (gl as any).getCommitDetails( - fakeCommitInfo, - 'token', - ); + const commitData = await gl.getCommitDetails(fakeCommitInfo, 'token'); expect(commitData.sha).to.equal('123abc'); expect(commitData.parents[0].sha).to.equal('p1'); expect(commitData.files).to.have.length(5); @@ -237,7 +229,7 @@ describe('Commit and Pull Details', () => { it('collects all files across merge request pages (pagination)', async () => { mockGitlabPullFetch(120); - const mrData = await (gl as any).getPullDetails(fakePullInfo, 'token'); + const mrData = await gl.getPullDetails(fakePullInfo, 'token'); expect(mrData.info.base.sha).to.equal('baseSha'); expect(mrData.info.head.sha).to.equal('headSha'); expect(mrData.files).to.have.length(120); @@ -247,7 +239,7 @@ describe('Commit and Pull Details', () => { it('collects all files when only one page is returned (no pagination)', async () => { mockGitlabPullFetch(5); - const mrData = await (gl as any).getPullDetails(fakePullInfo, 'token'); + const mrData = await gl.getPullDetails(fakePullInfo, 'token'); expect(mrData.info.base.sha).to.equal('baseSha'); expect(mrData.info.head.sha).to.equal('headSha'); expect(mrData.files).to.have.length(5); diff --git a/test/scm.download.test.ts b/test/scm.download.test.ts index e7dbe60..0a5a980 100644 --- a/test/scm.download.test.ts +++ b/test/scm.download.test.ts @@ -1,17 +1,37 @@ import { scmAdapters } from '../src/scm.js'; import { ModifiedFile } from '../src/types.ts'; + import expect from 'expect.js'; import sinon, { SinonStub } from 'sinon'; import browser from 'webextension-polyfill'; +type DownloadDelta = { id: number; state: { current: string } }; +type DownloadDeltaCallback = (delta: DownloadDelta) => void; + +// 
eslint-disable-next-line @typescript-eslint/no-explicit-any let adapter: any; let sandbox: sinon.SinonSandbox; -let downloadListeners: Array<(delta: any) => void>; -let downloadsStub: any; -let tabsStub: any; +let downloadListeners: Array; +let downloadsStub: { + download: SinonStub; + search: SinonStub; + onChanged: { + addListener: (cb: DownloadDeltaCallback) => void; + removeListener: SinonStub; + }; + erase: SinonStub; +}; +let tabsStub: { update: SinonStub }; let createObjectURLStub: SinonStub; let revokeObjectURLStub: SinonStub; +interface GlobalWithFetchStub extends GlobalThis { + fetch?: SinonStub<[input: RequestInfo | { url: string }], Promise>; + Buffer: typeof Buffer; +} + +const globalWithFetchStub = globalThis as unknown as GlobalWithFetchStub; + beforeEach(() => { sandbox = sinon.createSandbox(); downloadListeners = []; @@ -22,7 +42,9 @@ beforeEach(() => { download: sandbox.stub().resolves(123), search: sandbox.stub().resolves([{ filename: '/tmp/file.ext' }]), onChanged: { - addListener: (cb: any) => downloadListeners.push(cb), + addListener: (cb: DownloadDeltaCallback) => { + downloadListeners.push(cb); + }, removeListener: sandbox.stub(), }, erase: sandbox.stub().resolves(undefined), @@ -34,7 +56,7 @@ beforeEach(() => { sandbox.stub(browser, 'downloads').get(() => downloadsStub); sandbox.stub(browser, 'tabs').get(() => tabsStub); - (global as any).fetch = sandbox.stub(); + globalWithFetchStub.fetch = sandbox.stub(); createObjectURLStub = sandbox .stub(URL, 'createObjectURL') @@ -43,7 +65,7 @@ beforeEach(() => { .stub(URL, 'revokeObjectURL') .callsFake(() => {}); - (global as any).Buffer = Buffer; + globalWithFetchStub.Buffer = Buffer; }); afterEach(() => { @@ -61,7 +83,7 @@ describe('Download helpers', () => { describe('downloadDummy()', () => { it('calls doDownload with correct data URL and filename', async () => { const ddSpy = sandbox - .stub(adapter as any, 'doDownload') + .stub(adapter, 'doDownload') .resolves('/tmp/diff/file.ext'); const out = await adapter['downloadDummy']('path/to/file.ext', '.X'); expect( @@ -74,7 +96,7 @@ describe('Download helpers', () => { }); it('uses correct mime type', async () => { - const spy = sandbox.stub(adapter as any, 'doDownload'); + const spy = sandbox.stub(adapter, 'doDownload'); await adapter['downloadDummy']('some/path/file.ext', '.X'); expect( spy.calledWith('data:text/ext;charset=utf-8,', 'diff/file/file.X.ext'), @@ -112,7 +134,8 @@ describe('Download helpers', () => { try { await adapter['doDownload']('url', 'file.ext'); expect().fail('Expected error not thrown'); - } catch (err: any) { + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; expect(err.message).to.be('Failed to start download'); } }); @@ -127,7 +150,8 @@ describe('Download helpers', () => { try { await promise; expect().fail('Expected error not thrown'); - } catch (err: any) { + } catch (err: unknown) { + if (!(err instanceof Error)) throw err; expect(err.message).to.be('Failed to retrieve download item'); } }); @@ -143,17 +167,20 @@ describe('Download helpers', () => { const fakeJson = { content: base64Content }; beforeEach(() => { - (global as any).fetch.resolves({ - ok: true, - statusText: 'OK', - json: async () => fakeJson, - blob: async () => new Blob([Uint8Array.from('content')]), - headers: { get: (_: string) => null }, - }); + globalWithFetchStub.fetch = sandbox.stub().callsFake( + () => + Promise.resolve( + new Response(JSON.stringify(fakeJson), { + statusText: 'OK', + headers: new Headers(), + }), + ), + // eslint-disable-next-line 
+      ) as any;
+    });
 
     it('handles JSON type with ObjectURL support', async () => {
-      sandbox.stub(adapter as any, 'doDownload').resolves('/out.ext');
+      sandbox.stub(adapter, 'doDownload').resolves('/out.ext');
 
       const out = await adapter['doDownloadFile'](
         apiUrl,
         'json',
         filename,
         suffix,
         token,
         sha,
       );
 
+      // Precompute the expected headers from the adapter under test
+      const headers = adapter.createHeaders(token);
       expect(
-        (global as any).fetch.calledWith(apiUrl, {
-          headers: adapter.createHeaders(token),
+        globalWithFetchStub.fetch!.calledWith(apiUrl, {
+          headers,
         }),
       ).to.be(true);
       expect(createObjectURLStub.called).to.be(true);
@@ -175,7 +204,9 @@ describe('Download helpers', () => {
     });
 
     it('throws a non-ok response', async () => {
-      (global as any).fetch.resolves({ ok: false, statusText: '404' });
+      globalWithFetchStub.fetch!.resolves(
+        new Response(null, { statusText: '404', status: 404 }),
+      );
       try {
         await adapter['doDownloadFile'](
           apiUrl,
@@ -186,7 +217,8 @@ describe('Download helpers', () => {
           sha,
         );
         expect().fail('Expected error not thrown');
-      } catch (err: any) {
+      } catch (err: unknown) {
+        if (!(err instanceof Error)) throw err;
         expect(err.message).to.match(/Failed to fetch file dir\/file.ext/);
       }
     });
@@ -195,14 +227,14 @@ describe('Download helpers', () => {
     it('handles RAW type with ObjectURL support', async () => {
       const blobUrl = 'blob:fake-object-url';
       createObjectURLStub.returns(blobUrl);
       const contentBytes = Uint8Array.from(Buffer.from('content'));
-      (global as any).fetch.resolves({
-        ok: true,
-        statusText: 'OK',
-        blob: async () => new Blob([contentBytes]),
-        headers: { get: (_: string) => null },
-      });
+      globalWithFetchStub.fetch!.resolves(
+        new Response(new Blob([contentBytes]), {
+          statusText: 'OK',
+          headers: new Headers(),
+        }),
+      );
       const ddStub = sandbox
-        .stub(adapter as any, 'doDownload')
+        .stub(adapter, 'doDownload')
         .resolves('/withObjectURL.ext');
       const out = await adapter['doDownloadFile'](
@@ -219,16 +251,18 @@ describe('Download helpers', () => {
     it('handles RAW type fallback (no ObjectURL) by building correct data URI', async () => {
       (URL.createObjectURL as SinonStub).restore();
-      (URL.createObjectURL as any) = undefined;
+      (URL.createObjectURL as unknown) = undefined;
+
       const contentBytes = Uint8Array.from(Buffer.from('content'));
-      (global as any).fetch.resolves({
-        ok: true,
-        statusText: 'OK',
-        blob: async () => new Blob([contentBytes]),
-        headers: { get: (_: string) => null },
-      });
+      globalWithFetchStub.fetch!.resolves(
+        new Response(new Blob([contentBytes]), {
+          status: 200,
+          statusText: 'OK',
+          headers: new Headers(),
+        }),
+      );
       const ddStub = sandbox
-        .stub(adapter as any, 'doDownload')
+        .stub(adapter, 'doDownload')
         .resolves('/fallback.ext');
       const out = await adapter['doDownloadFile'](
         apiUrl,
@@ -249,33 +283,32 @@ describe('Download helpers', () => {
       try {
         await adapter['doDownloadFile'](
           apiUrl,
-          'xml',
+          'xml' as unknown,
           filename,
           suffix,
           token,
           sha,
         );
         expect().fail('Expected error not thrown');
-      } catch (err: any) {
+      } catch (err: unknown) {
+        if (!(err instanceof Error)) throw err;
         expect(err.message).to.be('Unknown download type: xml');
       }
     });
 
     it('builds data URI when URL.createObjectURL is unavailable', async () => {
       (URL.createObjectURL as SinonStub).restore();
-      (URL.createObjectURL as any) = undefined;
+      (URL.createObjectURL as unknown) = undefined;
 
       const payload = Buffer.from('hello').toString('base64');
-      (global as any).fetch.resolves({
-        ok: true,
-        statusText: 'OK',
-        json: async () => ({ content: payload }),
-        headers: { get: (_: string) => null },
-      });
-
-      const dd = sandbox
-        .stub(adapter as any, 'doDownload')
-        .resolves('/jf.json');
+      globalWithFetchStub.fetch!.resolves(
+        new Response(JSON.stringify({ content: payload }), {
+          statusText: 'OK',
+          headers: new Headers(),
+        }),
+      );
+
+      const dd = sandbox.stub(adapter, 'doDownload').resolves('/jf.json');
       const out = await adapter['doDownloadFile'](
         'u',
         'json',
@@ -307,10 +340,10 @@ describe('Download helpers', () => {
       };
 
       const ddFileStub = sandbox
-        .stub(adapter as any, 'doDownloadFile')
+        .stub(adapter, 'doDownloadFile')
         .resolves('/old.ext');
       const dummyStub = sandbox
-        .stub(adapter as any, 'downloadDummy')
+        .stub(adapter, 'downloadDummy')
         .resolves('/new.ext');
 
       await adapter.downloadDiff(file, 'token');
@@ -348,10 +381,10 @@ describe('Download helpers', () => {
       };
 
       const dummyStub = sandbox
-        .stub(adapter as any, 'downloadDummy')
+        .stub(adapter, 'downloadDummy')
         .resolves('/old.ext');
       const ddFileStub = sandbox
-        .stub(adapter as any, 'doDownloadFile')
+        .stub(adapter, 'doDownloadFile')
         .resolves('/new.ext');
 
       await adapter.downloadDiff(file, 'token');
@@ -390,7 +423,7 @@ describe('Download helpers', () => {
       };
 
       const ddFileStub = sandbox
-        .stub(adapter as any, 'doDownloadFile')
+        .stub(adapter, 'doDownloadFile')
         .resolves('/file.ext');
       await adapter.downloadFile(file, 'new', 'token');
       expect(
diff --git a/test/scm.fetch-logic.test.ts b/test/scm.fetch-logic.test.ts
index 90ad663..06d1a1c 100644
--- a/test/scm.fetch-logic.test.ts
+++ b/test/scm.fetch-logic.test.ts
@@ -2,9 +2,9 @@ import { scmAdapters } from '../src/scm.js';
 import { ModifiedFile } from '../src/types.ts';
 import expect from 'expect.js';
 import sinon from 'sinon';
+import { createScmAdaptersForTests, globalWithFetch } from './utils.ts';
 
-const gh = new scmAdapters.github({ host: 'github.com', scm: 'github' });
-const gl = new scmAdapters.gitlab({ host: 'gitlab.com', scm: 'gitlab' });
+const { gh, gl } = createScmAdaptersForTests();
 
 describe('Fetch Logic', () => {
   describe('getCommitDetails()', () => {
@@ -13,35 +13,36 @@ describe('Fetch Logic', () => {
     const token = 'tok';
 
     beforeEach(() => {
-      sinon.stub(gh as any, 'getApiUrl').returns('https://api');
+      sinon.stub(gh, 'getApiUrl').returns('https://api');
       sinon
-        .stub(gh as any, 'createHeaders')
+        .stub(gh, 'createHeaders')
         .withArgs(token)
         .returns({ Authorization: 'token tok' });
     });
     afterEach(() => sinon.restore());
 
     it('returns parsed JSON when response.ok=true', async () => {
-      (globalThis as any).fetch = sinon.stub().resolves({
+      globalWithFetch.fetch = sinon.stub().resolves({
         ok: true,
         statusText: 'OK',
         json: async () => ({ sha: 'sha123', files: [] }),
       });
 
-      const result = await (gh as any).getCommitDetails(fakeInfo, token);
+      const result = await gh.getCommitDetails(fakeInfo, token);
       expect(result).to.eql({ sha: 'sha123', files: [] });
     });
 
     it('throws if response.ok=false', async () => {
-      (globalThis as any).fetch = sinon.stub().resolves({
+      globalWithFetch.fetch = sinon.stub().resolves({
         ok: false,
         statusText: 'Not Found',
       });
 
       try {
-        await (gh as any).getCommitDetails(fakeInfo, token);
+        await gh.getCommitDetails(fakeInfo, token);
         expect().fail('Expected error');
-      } catch (err: any) {
+      } catch (err: unknown) {
+        if (!(err instanceof Error)) throw err;
         expect(err.message).to.match(
           /Failed to retrieve commit details: Not Found/,
         );
@@ -54,16 +55,16 @@ describe('Fetch Logic', () => {
     const token = 'tok';
 
     beforeEach(() => {
-      sinon.stub(gl as any, 'getApiUrl').returns('https://gitlab.com/api/v4');
+      sinon.stub(gl, 'getApiUrl').returns('https://gitlab.com/api/v4');
       sinon
-        .stub(gl as any, 'createHeaders')
+        .stub(gl, 'createHeaders')
         .withArgs(token)
         .returns({ Authorization: 'Bearer tok' });
     });
     afterEach(() => sinon.restore());
 
     it('throws if diff page 1 fetch fails', async () => {
-      (globalThis as any).fetch = sinon
+      globalWithFetch.fetch = sinon
         .stub()
         .onFirstCall()
         .resolves({ ok: true, json: async () => ({ parent_ids: ['p1'] }) })
         .onSecondCall()
         .resolves({ ok: false, status: 500, statusText: 'Internal Error' });
 
       try {
-        await (gl as any).getCommitDetails(fakeInfo, token);
+        await gl.getCommitDetails(fakeInfo, token);
         expect().fail('Expected error');
-      } catch (err: any) {
+      } catch (err: unknown) {
+        if (!(err instanceof Error)) throw err;
         expect(err.message).to.match(/\[500\] Internal Error/);
       }
     });
@@ -88,9 +90,9 @@ describe('Fetch Logic', () => {
     };
 
     beforeEach(() => {
-      sinon.stub(gl as any, 'getApiUrl').returns('https://gitlab.com/api/v4');
+      sinon.stub(gl, 'getApiUrl').returns('https://gitlab.com/api/v4');
       sinon
-        .stub(gl as any, 'createHeaders')
+        .stub(gl, 'createHeaders')
         .withArgs(token)
         .returns({ Authorization: 'Bearer token' });
 
@@ -128,7 +130,7 @@ describe('Fetch Logic', () => {
     afterEach(() => sinon.restore());
 
     it('handles commit from nested group project', async () => {
-      const result = await (gl as any).getCommitDetails(fakeInfo, token);
+      const result = await gl.getCommitDetails(fakeInfo, token);
       expect(result).to.have.property('sha', 'abc123');
       expect(result.files).to.be.an('array');
       expect(result.files[0].filename).to.equal('file.pkg');
@@ -142,16 +144,16 @@ describe('Fetch Logic', () => {
     const pullInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' };
 
     beforeEach(() => {
-      sinon.stub(gh as any, 'getApiUrl').returns('https://api');
+      sinon.stub(gh, 'getApiUrl').returns('https://api');
       sinon
-        .stub(gh as any, 'createHeaders')
+        .stub(gh, 'createHeaders')
         .withArgs(token)
         .returns({ Authorization: 'token token' });
     });
     afterEach(() => sinon.restore());
 
     it('throws when first page of pull-files is non-ok', async () => {
-      (globalThis as any).fetch = sinon
+      globalWithFetch.fetch = sinon
         .stub()
         .onFirstCall()
         .resolves({ ok: true, json: async () => {} })
         .onSecondCall()
         .resolves({ ok: false, statusText: 'Bad Gateway' });
 
       try {
-        await (gh as any).getPullDetails(pullInfo, token);
+        await gh.getPullDetails(pullInfo, token);
         expect().fail('Expected error');
-      } catch (err: any) {
+      } catch (err: unknown) {
+        if (!(err instanceof Error)) throw err;
         expect(err.message).to.match(
           /Failed to retrieve paginated data \(page 1\): Bad Gateway/,
         );
@@ -169,15 +172,16 @@ describe('Fetch Logic', () => {
     });
 
     it('throws if initial pull metadata fetch fails', async () => {
-      (globalThis as any).fetch = sinon.stub().resolves({
+      globalWithFetch.fetch = sinon.stub().resolves({
         ok: false,
         statusText: 'Not Found',
       });
 
       try {
-        await (gh as any).getPullDetails(pullInfo, token);
+        await gh.getPullDetails(pullInfo, token);
         expect().fail('Expected error not thrown');
-      } catch (err: any) {
+      } catch (err: unknown) {
+        if (!(err instanceof Error)) throw err;
         expect(err.message).to.match(
           /Failed to retrieve pull details: Not Found/,
         );
@@ -190,16 +194,16 @@ describe('Fetch Logic', () => {
     const token = 'tok';
 
     beforeEach(() => {
-      sinon.stub(gl as any, 'getApiUrl').returns('https://gitlab.com/api/v4');
+      sinon.stub(gl, 'getApiUrl').returns('https://gitlab.com/api/v4');
       sinon
-        .stub(gl as any, 'createHeaders')
+        .stub(gl, 'createHeaders')
         .withArgs(token)
         .returns({ Authorization: 'Bearer tok' });
     });
     afterEach(() => sinon.restore());
 
     it('throws if MR diff page 2 fetch fails', async () => {
-      (globalThis as any).fetch = (input: any) => {
+      globalWithFetch.fetch = (input: RequestInfo | { url: string }) => {
         const urlStr = typeof input === 'string' ? input : input.url;
         const u = new URL(urlStr);
 
@@ -234,13 +238,19 @@ describe('Fetch Logic', () => {
           );
         }
 
-        return Promise.reject(new Error('Unexpected fetch ' + urlStr));
+        return Promise.resolve(
+          new Response(null, {
+            status: 500,
+            statusText: 'Unexpected fetch ' + urlStr,
+          }),
+        );
       };
 
       try {
-        await (gl as any).getPullDetails(fakeInfo, token);
+        await gl.getPullDetails(fakeInfo, token);
         expect().fail('Expected error');
-      } catch (err: any) {
+      } catch (err: unknown) {
+        if (!(err instanceof Error)) throw err;
         expect(err.message).to.match(
           /Failed to retrieve paginated data \(page 2\): \[502\] Bad Gateway/,
         );
@@ -248,10 +258,9 @@ describe('Fetch Logic', () => {
     });
 
     it('throws if final merge request metadata fetch fails', async () => {
-      (globalThis as any).fetch = (input: any) => {
+      globalWithFetch.fetch = (input: RequestInfo | { url: string }) => {
         const url = typeof input === 'string' ? input : input.url;
         const u = new URL(url);
-
         if (u.pathname.endsWith('/diffs')) {
           if (u.searchParams.get('page') === '1') {
             return Promise.resolve(
@@ -275,21 +284,27 @@ describe('Fetch Logic', () => {
             );
           }
           return Promise.resolve(
-            new Response(JSON.stringify([]), { status: 200 }),
+            new Response(JSON.stringify([]), {
+              status: 200,
+              headers: new Headers(),
+            }),
           );
         }
 
-        return Promise.resolve({
-          ok: false,
-          status: 404,
-          statusText: 'Not Found',
-        });
+        return Promise.resolve(
+          new Response(null, {
+            status: 404,
+            statusText: 'Not Found',
+            headers: new Headers(),
+          }),
+        );
       };
 
       try {
-        await (gl as any).getPullDetails(fakeInfo, token);
+        await gl.getPullDetails(fakeInfo, token);
         expect().fail('Expected error');
-      } catch (err: any) {
+      } catch (err: unknown) {
+        if (!(err instanceof Error)) throw err;
         expect(err.message).to.match(
           /Failed to retrieve merge request details: \[404\] Not Found/,
         );
@@ -318,6 +333,7 @@ describe('Fetch Logic', () => {
 
   cases.forEach(({ name, Adapter, hostInfo, commitUrl, prUrl }) => {
     describe(`${name} Adapter`, () => {
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
       let adapter: any;
       let fakeFiles: ModifiedFile[];
       let stubCommit: sinon.SinonStub;
@@ -365,7 +381,8 @@ describe('Fetch Logic', () => {
       try {
         await adapter.fetchModifiedFiles('not-a-url', 'token');
         throw new Error('Promise did not reject');
-      } catch (err: any) {
+      } catch (err: unknown) {
+        if (!(err instanceof Error)) throw err;
         expect(err.message).to.match(/Not a valid URL: not-a-url/);
       }
     });
@@ -378,7 +395,8 @@ describe('Fetch Logic', () => {
           : 'https://gitlab.com/foo/bar/-/issues/1';
         await adapter.fetchModifiedFiles(badUrl, 'token');
         throw new Error('Promise did not reject');
-      } catch (err: any) {
+      } catch (err: unknown) {
+        if (!(err instanceof Error)) throw err;
         expect(err.message).to.match(
           /Not a GitHub commit or pull request page/,
         );
@@ -391,20 +409,26 @@ describe('Fetch Logic', () => {
 
 describe('test()', () => {
   describe('GitHub Adapter', () => {
     it('returns true when fetch.ok is true', async () => {
-      (globalThis as any).fetch = () => Promise.resolve({ ok: true } as any);
+      globalWithFetch.fetch = () =>
+        Promise.resolve(
+          new Response(null, { status: 200, statusText: 'OK' }),
+        );
       const result = await gh.test('token');
       expect(result).to.equal(true);
     });
 
     it('returns false then fetch.ok is false', async () => {
-      (globalThis as any).fetch = () => Promise.resolve({ ok: false } as any);
+      globalWithFetch.fetch = () =>
+        Promise.resolve(
+          new Response(null, { status: 400, statusText: 'Bad Request' }),
+        );
       const result = await gh.test('token');
       expect(result).to.equal(false);
     });
 
     it('returns false on network error', async () => {
       const errStub = sinon.stub(console, 'error');
-      (globalThis as any).fetch = () => Promise.reject(new Error());
+      globalWithFetch.fetch = () => Promise.reject(new Error());
       const result = await gh.test('token');
       expect(result).to.equal(false);
       errStub.restore();
@@ -413,43 +437,47 @@ describe('test()', () => {
 
   describe('GitLab Adapter', () => {
     it('returns true on 200 OK', async () => {
-      (globalThis as any).fetch = () =>
-        Promise.resolve({
-          ok: true,
-          status: 200,
-          headers: { get: (_: string) => null },
-        } as any);
+      globalWithFetch.fetch = () =>
+        Promise.resolve(
+          new Response(null, {
+            status: 200,
+            headers: new Headers(),
+          }),
+        );
       const result = await gl.test('token');
       expect(result).to.equal(true);
     });
 
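+    // A 403 that still carries the x-gitlab-meta header comes from the GitLab
+    // instance itself, so the adapter can treat the token as accepted.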
     it('returns true on 403 with x-gitlab-meta header', async () => {
-      (globalThis as any).fetch = () =>
-        Promise.resolve({
-          ok: false,
-          status: 403,
-          headers: {
-            get: (name: string) => (name === 'x-gitlab-meta' ? 'yes' : null),
-          },
-        } as any);
+      globalWithFetch.fetch = () =>
+        Promise.resolve(
+          new Response(null, {
+            status: 403,
+            headers: { 'x-gitlab-meta': 'yes' },
+          }),
+        );
       const result = await gl.test('token');
       expect(result).to.equal(true);
     });
 
     it('returns false on 403 without x-gitlab-meta header', async () => {
-      (globalThis as any).fetch = () =>
-        Promise.resolve({
-          ok: false,
-          status: 403,
-          headers: { get: () => null },
-        } as any);
+      globalWithFetch.fetch = () =>
+        Promise.resolve(
+          new Response(null, {
+            status: 403,
+            statusText: 'Forbidden',
+            headers: new Headers(),
+          }),
+        );
       const result = await gl.test('token');
       expect(result).to.equal(false);
     });
 
     it('returns false on network error', async () => {
       const errStub = sinon.stub(console, 'error');
-      (globalThis as any).fetch = () => Promise.reject(new Error());
+      globalWithFetch.fetch = () => Promise.reject(new Error());
       const result = await gl.test('token');
       expect(result).to.equal(false);
       errStub.restore();
diff --git a/test/scm.gitlab-internals.test.ts b/test/scm.gitlab-internals.test.ts
index 05de1d8..a0df1f9 100644
--- a/test/scm.gitlab-internals.test.ts
+++ b/test/scm.gitlab-internals.test.ts
@@ -1,17 +1,17 @@
-import { scmAdapters } from '../src/scm.js';
 import expect from 'expect.js';
 import sinon from 'sinon';
+import { createScmAdaptersForTests, globalWithFetch } from './utils.js';
 
-const gl = new scmAdapters.gitlab({ host: 'gitlab.com', scm: 'gitlab' });
+const { gl } = createScmAdaptersForTests();
 
 describe('GitLab Adapter internals', () => {
   it('parseStats() counts additions and deletions correctly', () => {
-    const stats = (gl as any).parseStats('\n+a\n-b\n+c\n-d\n');
+    const stats = gl.parseStats('\n+a\n-b\n+c\n-d\n');
     expect(stats).to.eql({ additions: 2, deletions: 2 });
   });
 
   it('parseStats() returns zeroes on empty diff', () => {
-    const stats = (gl as any).parseStats('');
+    const stats = gl.parseStats('');
     expect(stats).to.eql({ additions: 0, deletions: 0 });
   });
 
@@ -36,7 +36,7 @@ describe('GitLab Adapter internals', () => {
       generated_file: null,
     },
   ];
-    const out = (gl as any).processChanges(raw);
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    const out = gl.processChanges(raw as any);
     expect(out).to.have.length(1);
     expect(out[0].filename).to.equal('keep.pkg');
   });
 
   it('getCommitDetails throws on initial metadata fetch failure', async () => {
     const fakeInfo = { owner: 'foo', repo: 'bar', commitHash: '123abc' };
     const token = 'tok';
-    sinon.stub(gl as any, 'getApiUrl').returns('https://gitlab.com/api/v4');
+    sinon.stub(gl, 'getApiUrl').returns('https://gitlab.com/api/v4');
     sinon
-      .stub(gl as any, 'createHeaders')
+      .stub(gl, 'createHeaders')
       .withArgs(token)
       .returns({ Authorization: 'Bearer tok' });
 
-    (globalThis as any).fetch = sinon.stub().resolves({
+    globalWithFetch.fetch = sinon.stub().resolves({
       ok: false,
       status: 401,
       statusText: 'Unauthorized',
     });
 
     try {
-      await (gl as any).getCommitDetails(fakeInfo, token);
+      await gl.getCommitDetails(fakeInfo, token);
       expect().fail('Expected error');
-    } catch (err: any) {
+    } catch (err: unknown) {
+      if (!(err instanceof Error)) throw err;
       expect(err.message).to.match(/\[401\] Unauthorized/);
     } finally {
       sinon.restore();
diff --git a/test/scm.mapping.test.ts b/test/scm.mapping.test.ts
index de067e9..725e12c 100644
--- a/test/scm.mapping.test.ts
+++ b/test/scm.mapping.test.ts
@@ -1,10 +1,8 @@
-import { scmAdapters } from '../src/scm.js';
 import { ModifiedFile } from '../src/types.ts';
 import expect from 'expect.js';
 import sinon, { SinonStub } from 'sinon';
-
-const gh = new scmAdapters.github({ host: 'github.com', scm: 'github' });
-const gl = new scmAdapters.gitlab({ host: 'gitlab.com', scm: 'gitlab' });
+import { createScmAdaptersForTests } from './utils.ts';
+const { gh, gl } = createScmAdaptersForTests();
 
 describe('Mapping & Filtering (response files to internal files)', () => {
   describe('handleCommit()', () => {
@@ -44,12 +42,13 @@ describe('Mapping & Filtering (response files to internal files)', () => {
     };
 
     beforeEach(() => {
-      sinon.stub(gh as any, 'getCommitDetails').resolves(fakeApiResponse);
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      sinon.stub(gh, 'getCommitDetails').resolves(fakeApiResponse as any);
     });
     afterEach(() => sinon.restore());
 
     it('filters and maps commit files correctly', async () => {
-      const result = await (gh as any).handleCommit(fakeCommitInfo, 'token');
+      const result = await gh.handleCommit(fakeCommitInfo, 'token');
       expect(result).to.have.length(1);
       const mf: ModifiedFile = result[0];
       expect(mf.filename).to.equal('keep.pkg');
@@ -66,12 +65,12 @@ describe('Mapping & Filtering (response files to internal files)', () => {
     });
 
     it('throws if commitData.files is missing or not an array', async () => {
-      ((gh as any).getCommitDetails as SinonStub).restore();
+      (gh.getCommitDetails as SinonStub).restore();
       const stub = sinon
-        .stub(gh as any, 'getCommitDetails')
+        .stub(gh, 'getCommitDetails')
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
         .resolves({ sha: 'sha', parents: [{ sha: 'parentSha' }] } as any);
       try {
-        await (gh as any).handleCommit(fakeCommitInfo, 'token');
+        await gh.handleCommit(fakeCommitInfo, 'token');
         throw new Error('Promise did not reject');
       } catch (err) {
         expect(err.message).to.match(/Unable to retrieve modified files/);
@@ -104,12 +104,12 @@ describe('Mapping & Filtering (response files to internal files)', () => {
     };
 
     beforeEach(() => {
-      sinon.stub(gl as any, 'getCommitDetails').resolves(fakeApiResponse);
+      sinon.stub(gl, 'getCommitDetails').resolves(fakeApiResponse);
     });
     afterEach(() => sinon.restore());
 
     it('processes stats correctly', async () => {
-      const result = await (gl as any).handleCommit(fakeCommitInfo, 'token');
+      const result = await gl.handleCommit(fakeCommitInfo, 'token');
       expect(result).to.have.length(1);
       const mf: ModifiedFile = result[0];
       expect(mf.filename).to.equal('keep.pkg');
@@ -130,12 +130,13 @@ describe('Mapping & Filtering (response files to internal files)', () => {
     });
 
     it('throws if commitData.files is missing or not an array', async () => {
-      ((gl as any).getCommitDetails as SinonStub).restore();
+      (gl.getCommitDetails as SinonStub).restore();
       const stub = sinon
-        .stub(gl as any, 'getCommitDetails')
+        .stub(gl, 'getCommitDetails')
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
         .resolves({ sha: 'sha', parents: [{ sha: 'parentSha' }] } as any);
       try {
-        await (gl as any).handleCommit(fakeCommitInfo, 'token');
+        await gl.handleCommit(fakeCommitInfo, 'token');
         throw new Error('Promise did not reject');
       } catch (err) {
         expect(err.message).to.match(/Unable to retrieve modified files/);
@@ -178,12 +179,13 @@ describe('Mapping & Filtering (response files to internal files)', () => {
     };
 
     beforeEach(() => {
-      sinon.stub(gh as any, 'getPullDetails').resolves(fakeApiResponse);
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      sinon.stub(gh, 'getPullDetails').resolves(fakeApiResponse as any);
     });
     afterEach(() => sinon.restore());
 
     it('filters and maps pull request files correctly', async () => {
-      const result: ModifiedFile[] = await (gh as any).handlePullRequest(
+      const result: ModifiedFile[] = await gh.handlePullRequest(
         fakePullInfo,
         'token',
       );
@@ -221,12 +223,12 @@ describe('Mapping & Filtering (response files to internal files)', () => {
     };
 
     beforeEach(() => {
-      sinon.stub(gl as any, 'getPullDetails').resolves(fakeApiResponse);
+      sinon.stub(gl, 'getPullDetails').resolves(fakeApiResponse);
     });
     afterEach(() => sinon.restore());
 
     it('maps merge request files correctly', async () => {
-      const result: ModifiedFile[] = await (gl as any).handlePullRequest(
+      const result: ModifiedFile[] = await gl.handlePullRequest(
         fakePullInfo,
         'token',
       );
diff --git a/test/scm.methods.test.ts b/test/scm.methods.test.ts
index 5963968..5acc8ad 100644
--- a/test/scm.methods.test.ts
+++ b/test/scm.methods.test.ts
@@ -38,12 +38,14 @@ describe('Adapter Methods', () => {
       scm,
     }) => {
       it(`${name} default host returns correct API URL`, () => {
-        const adapter = new Class({ host, scm });
-        expect((adapter as any).getApiUrl()).to.equal(expectedApiUrl);
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        const adapter: any = new Class({ host, scm });
+        expect(adapter.getApiUrl()).to.equal(expectedApiUrl);
       });
       it(`${name} custom host returns correct API URL`, () => {
-        const adapter = new Class({ host: customHost, scm });
-        expect((adapter as any).getApiUrl()).to.equal(expectedCustomApiUrl);
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        const adapter: any = new Class({ host: customHost, scm });
+        expect(adapter.getApiUrl()).to.equal(expectedCustomApiUrl);
       });
     },
   );
@@ -52,8 +54,10 @@ describe('Adapter Methods', () => {
 
   describe('createHeaders()', () => {
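+    // adapterCases supplies the expected Authorization prefix for each SCM adapter.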
     adapterCases.forEach(({ name, Class, host, tokenPrefix, scm }) => {
       it(`${name} adds correct Authorization header`, () => {
-        const adapter = new Class({ host, scm });
-        const headers = (adapter as any).createHeaders('abc123');
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        const adapter: any = new Class({ host, scm });
+        const headers = adapter.createHeaders('abc123');
         expect(headers).to.have.property(
           'Authorization',
           `${tokenPrefix} abc123`,
@@ -64,19 +67,16 @@ describe('Adapter Methods', () => {
 
   describe('isSupportedFile()', () => {
     adapterCases.forEach(({ name, Class, host, scm }) => {
-      const adapter = new Class({ host, scm });
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      const adapter: any = new Class({ host, scm });
       it(`${name} returns true for supported extensions`, () => {
         SUPPORTED_FILES.forEach((ext) => {
-          expect((adapter as any).isSupportedFile(`file.${ext}`)).to.equal(
-            true,
-          );
+          expect(adapter.isSupportedFile(`file.${ext}`)).to.equal(true);
         });
       });
       it(`${name} returns false for unsupported or missing extensions`, () => {
-        expect((adapter as any).isSupportedFile('file.unknownext')).to.equal(
-          false,
-        );
-        expect((adapter as any).isSupportedFile('file')).to.equal(false);
+        expect(adapter.isSupportedFile('file.unknownext')).to.equal(false);
+        expect(adapter.isSupportedFile('file')).to.equal(false);
       });
     });
   });
diff --git a/test/scm.url.detection.test.ts b/test/scm.url.detection.test.ts
index b258ecc..b0762af 100644
--- a/test/scm.url.detection.test.ts
+++ b/test/scm.url.detection.test.ts
@@ -1,6 +1,5 @@
-import { scmAdapters } from '../src/scm.js';
-import { HostInfo } from '../src/types.js';
 import expect from 'expect.js';
+import { createScmAdaptersForTests } from './utils.js';
 
 const GITHUB_REPO = 'https://github.com/foo/bar';
 const GITLAB_REPO = 'https://gitlab.com/foo/bar';
@@ -21,14 +20,14 @@ const GITLAB_MR_VARIANTS = [
   'unexisting_subpage',
 ];
 
-const gh = new scmAdapters.github({
-  host: 'github.com',
-  scm: 'github',
-} as HostInfo);
-const gl = new scmAdapters.gitlab({
-  host: 'gitlab.com',
-  scm: 'gitlab',
-} as HostInfo);
+const { gh, gl } = createScmAdaptersForTests();
 
 describe('URL Detection', () => {
   describe('GitHub Adapter', () => {
     it('recognizes valid commit URLs', () => {
       const url = new URL(`${GITHUB_REPO}/commit/123abc`);
-      expect((gh as any).testCommit(url)).to.not.equal(null);
-      expect((gh as any).testPullRequest(url)).to.equal(null);
+      expect(gh.testCommit(url)).to.not.equal(null);
+      expect(gh.testPullRequest(url)).to.equal(null);
     });
 
     it('returns null for invalid commit URLs', () => {
       [
         `${GITHUB_REPO}/commit`,
         `${GITHUB_REPO}/commits/123abc`,
         `${GITHUB_REPO}/commit/`,
       ].forEach((urlStr) => {
         const url = new URL(urlStr);
-        expect((gh as any).testCommit(url)).to.equal(null);
-        expect((gh as any).testPullRequest(url)).to.equal(null);
+        expect(gh.testCommit(url)).to.equal(null);
+        expect(gh.testPullRequest(url)).to.equal(null);
       });
     });
 
     it('recognizes valid pull request URLs (including subpages)', () => {
       GITHUB_PR_VARIANTS.forEach((suffix) => {
         const url = new URL(`${GITHUB_REPO}/pull/1/${suffix}`);
-        expect((gh as any).testPullRequest(url)).to.not.equal(null);
-        expect((gh as any).testCommit(url)).to.equal(null);
+        expect(gh.testPullRequest(url)).to.not.equal(null);
+        expect(gh.testCommit(url)).to.equal(null);
       });
     });
 
     it('returns null for invalid pull request URLs', () => {
       const url = new URL(`${GITHUB_REPO}/pull`);
-      expect((gh as any).testCommit(url)).to.equal(null);
-      expect((gh as any).testPullRequest(url)).to.equal(null);
+      expect(gh.testCommit(url)).to.equal(null);
+      expect(gh.testPullRequest(url)).to.equal(null);
     });
   });
 
   describe('GitLab Adapter', () => {
     it('recognizes valid commit URLs', () => {
       const url = new URL(`${GITLAB_REPO}/-/commit/123abc`);
-      expect((gl as any).testCommit(url)).to.not.equal(null);
-      expect((gl as any).testPullRequest(url)).to.equal(null);
+      expect(gl.testCommit(url)).to.not.equal(null);
+      expect(gl.testPullRequest(url)).to.equal(null);
     });
 
     it('returns null for invalid commit URLs', () => {
       [
         `${GITLAB_REPO}/-/commit`,
         `${GITLAB_REPO}/-/commits/123abc`,
         `${GITLAB_REPO}/-/commit/`,
       ].forEach((urlStr) => {
         const url = new URL(urlStr);
-        expect((gl as any).testCommit(url)).to.equal(null);
-        expect((gl as any).testPullRequest(url)).to.equal(null);
+        expect(gl.testCommit(url)).to.equal(null);
+        expect(gl.testPullRequest(url)).to.equal(null);
       });
     });
 
     it('recognizes valid merge request URLs (including subpages)', () => {
       GITLAB_MR_VARIANTS.forEach((suffix) => {
         const url = new URL(`${GITLAB_REPO}/-/merge_requests/1/${suffix}`);
-        expect((gl as any).testPullRequest(url)).to.not.equal(null);
-        expect((gl as any).testCommit(url)).to.equal(null);
+        expect(gl.testPullRequest(url)).to.not.equal(null);
+        expect(gl.testCommit(url)).to.equal(null);
       });
     });
 
     it('returns null for invalid merge request URLs', () => {
       const url = new URL(`${GITLAB_REPO}/-/merge_requests`);
-      expect((gl as any).testCommit(url)).to.equal(null);
-      expect((gl as any).testPullRequest(url)).to.equal(null);
+      expect(gl.testCommit(url)).to.equal(null);
+      expect(gl.testPullRequest(url)).to.equal(null);
     });
   });
 });
diff --git a/test/utils.ts b/test/utils.ts
new file mode 100644
index 0000000..83a6b3d
--- /dev/null
+++ b/test/utils.ts
@@ -0,0 +1,49 @@
+import { BaseScmAdapter, scmAdapters } from '../src/scm.js';
+
+interface GlobalWithFetch extends GlobalThis {
+  fetch?: (input: RequestInfo | { url: string }) => Promise<Response>;
+}
+
+export const globalWithFetch = globalThis as GlobalWithFetch;
+
+export type InternalAdapterMethodsGithub = BaseScmAdapter & {
+  testCommit: typeof scmAdapters.github.prototype.testCommit;
+  testPullRequest: typeof scmAdapters.github.prototype.testPullRequest;
+  handlePullRequest: typeof scmAdapters.github.prototype.handlePullRequest;
+  getPullDetails: typeof scmAdapters.github.prototype.getPullDetails;
+  handleCommit: typeof scmAdapters.github.prototype.handleCommit;
+  getCommitDetails: typeof scmAdapters.github.prototype.getCommitDetails;
+  getApiUrl: typeof scmAdapters.github.prototype.getApiUrl;
+  createHeaders: typeof scmAdapters.github.prototype.createHeaders;
+};
+
+export type InternalAdapterMethodsGitlab = BaseScmAdapter & {
+  testCommit: typeof scmAdapters.gitlab.prototype.testCommit;
+  testPullRequest: typeof scmAdapters.gitlab.prototype.testPullRequest;
+  handlePullRequest: typeof scmAdapters.gitlab.prototype.handlePullRequest;
+  getPullDetails: typeof scmAdapters.gitlab.prototype.getPullDetails;
+  handleCommit: typeof scmAdapters.gitlab.prototype.handleCommit;
+  getCommitDetails: typeof scmAdapters.gitlab.prototype.getCommitDetails;
+  getApiUrl: typeof scmAdapters.gitlab.prototype.getApiUrl;
+  createHeaders: typeof scmAdapters.gitlab.prototype.createHeaders;
+  parseStats: typeof scmAdapters.gitlab.prototype.parseStats;
+  processChanges: typeof scmAdapters.gitlab.prototype.processChanges;
+};
+
+// Helper that creates SCM adapter instances for tests, typed so that internal methods are accessible
+export function createScmAdaptersForTests() {
+  const gh = new scmAdapters.github({
+    host: 'github.com',
+    scm: 'github',
+  }) as unknown as InternalAdapterMethodsGithub;
+  const gl = new scmAdapters.gitlab({
+    host: 'gitlab.com',
+    scm: 'gitlab',
+  }) as unknown as InternalAdapterMethodsGitlab;
+  return { gh, gl };
+}
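+
+// Example usage (illustrative):
+//   const { gh } = createScmAdaptersForTests();
+//   gh.createHeaders('abc123'); // typed access, no `as any` cast
+//   gh.testCommit(new URL('https://github.com/foo/bar/commit/123abc'));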