diff --git a/docs/react-component-atlas.md b/docs/react-component-atlas.md index 35e55f883fe..a4ae15f3e2e 100644 --- a/docs/react-component-atlas.md +++ b/docs/react-component-atlas.md @@ -66,12 +66,24 @@ This is a high-level overview of the structure of the React component tree that > > > > > > > > > > > > Render a list of issueish results as rows within the result list of a specific search. > -> > [``](/lib/controllers/file-patch-controller.js) -> > [``](/lib/views/file-patch-view.js) +> > [``](/lib/items/changed-file-item.js) +> > [``](/lib/containers/changed-file-container.js) > > > > The workspace-center pane that appears when looking at the staged or unstaged changes associated with a file. > > -> > :construction: Being rewritten in [#1712](https://github.com/atom/github/pull/1512) :construction: +> > > [``](/lib/controllers/multi-file-patch-controller.js) +> > > [``](/lib/views/multi-file-patch-view.js) +> > > +> > > Render a sequence of git-generated file patches within a TextEditor, using decorations to include contextually +> > > relevant controls. +> +> > [``](/lib/items/commit-preview-item.js) +> > [``](/lib/containers/commit-preview-container.js) +> > +> > The workspace-center pane that appears when looking at _all_ the staged changes that will be going into the next commit. 
+> > +> > > [``](/lib/controllers/multi-file-patch-controller.js) +> > > [``](/lib/views/multi-file-patch-view.js) > > > [``](/lib/items/issueish-detail-item.js) > > [``](/lib/containers/issueish-detail-container.js) diff --git a/docs/react-component-classification.md b/docs/react-component-classification.md index 75c3f97fd12..05a5929ec37 100644 --- a/docs/react-component-classification.md +++ b/docs/react-component-classification.md @@ -6,7 +6,7 @@ This is a high-level summary of the organization and implementation of our React **Items** are intended to be used as top-level components within subtrees that are rendered into some [Portal](https://reactjs.org/docs/portals.html) and passed to the Atom API, like pane items, dock items, or tooltips. They are mostly responsible for implementing the [Atom "item" contract](https://github.com/atom/atom/blob/a3631f0dafac146185289ac5e37eaff17b8b0209/src/workspace.js#L29-L174). -These live within [`lib/items/`](/lib/items), are tested within [`test/items/`](/test/items), and are named with an `Item` suffix. Examples: `PullRequestDetailItem`, `FilePatchItem`. +These live within [`lib/items/`](/lib/items), are tested within [`test/items/`](/test/items), and are named with an `Item` suffix. Examples: `PullRequestDetailItem`, `ChangedFileItem`. 
## Containers diff --git a/keymaps/git.cson b/keymaps/git.cson index 97029fd5611..36bda93362a 100644 --- a/keymaps/git.cson +++ b/keymaps/git.cson @@ -24,8 +24,13 @@ '.github-StagingView': 'tab': 'core:focus-next' 'shift-tab': 'core:focus-previous' - 'o': 'github:open-file' + 'o': 'github:jump-to-file' 'left': 'core:move-left' + 'cmd-left': 'core:move-left' + +'.github-CommitView button': + 'tab': 'core:focus-next' + 'shift-tab': 'core:focus-previous' '.github-StagingView.unstaged-changes-focused': 'cmd-backspace': 'github:discard-changes-in-selected-files' @@ -36,6 +41,11 @@ 'ctrl-enter': 'github:commit' 'shift-tab': 'core:focus-previous' +'.github-CommitView-commitPreview': + 'cmd-left': 'github:dive' + 'ctrl-left': 'github:dive' + 'enter': 'native!' + '.github-FilePatchView atom-text-editor:not([mini])': 'cmd-/': 'github:toggle-patch-selection-mode' 'ctrl-/': 'github:toggle-patch-selection-mode' @@ -43,10 +53,10 @@ 'ctrl-backspace': 'github:discard-selected-lines' 'cmd-enter': 'core:confirm' 'ctrl-enter': 'core:confirm' - 'cmd-right': 'github:surface-file' - 'ctrl-right': 'github:surface-file' - 'cmd-o': 'github:open-file' - 'ctrl-o': 'github:open-file' + 'cmd-right': 'github:surface' + 'ctrl-right': 'github:surface' + 'cmd-o': 'github:jump-to-file' + 'ctrl-o': 'github:jump-to-file' '.github-FilePatchView--hunkMode atom-text-editor:not([mini])': 'down': 'github:select-next-hunk' diff --git a/lib/atom/decoration.js b/lib/atom/decoration.js index 841b8bd22ea..9e3049f1a24 100644 --- a/lib/atom/decoration.js +++ b/lib/atom/decoration.js @@ -2,6 +2,7 @@ import React from 'react'; import ReactDOM from 'react-dom'; import PropTypes from 'prop-types'; import {Disposable} from 'event-kit'; +import cx from 'classnames'; import {createItem, autobind, extractProps} from '../helpers'; import {RefHolderPropType} from '../prop-types'; @@ -49,7 +50,7 @@ class BareDecoration extends React.Component { this.item = null; if (['gutter', 'overlay', 'block'].includes(this.props.type)) 
{ this.domNode = document.createElement('div'); - this.domNode.className = 'react-atom-decoration'; + this.domNode.className = cx('react-atom-decoration', this.props.className); } } diff --git a/lib/atom/uri-pattern.js b/lib/atom/uri-pattern.js index 0a213f50e97..558a54f95d7 100644 --- a/lib/atom/uri-pattern.js +++ b/lib/atom/uri-pattern.js @@ -246,7 +246,7 @@ function dashEscape(raw) { * Reverse the escaping performed by `dashEscape` by un-doubling `-` characters. */ function dashUnescape(escaped) { - return escaped.replace('--', '-'); + return escaped.replace(/--/g, '-'); } /** diff --git a/lib/containers/file-patch-container.js b/lib/containers/changed-file-container.js similarity index 77% rename from lib/containers/file-patch-container.js rename to lib/containers/changed-file-container.js index 3b3865f08ce..fc9a83ee4cc 100644 --- a/lib/containers/file-patch-container.js +++ b/lib/containers/changed-file-container.js @@ -5,9 +5,9 @@ import yubikiri from 'yubikiri'; import {autobind} from '../helpers'; import ObserveModel from '../views/observe-model'; import LoadingView from '../views/loading-view'; -import FilePatchController from '../controllers/file-patch-controller'; +import ChangedFileController from '../controllers/changed-file-controller'; -export default class FilePatchContainer extends React.Component { +export default class ChangedFileContainer extends React.Component { static propTypes = { repository: PropTypes.object.isRequired, stagingStatus: PropTypes.oneOf(['staged', 'unstaged']), @@ -30,8 +30,10 @@ export default class FilePatchContainer extends React.Component { } fetchData(repository) { + const staged = this.props.stagingStatus === 'staged'; + return yubikiri({ - filePatch: repository.getFilePatchForPath(this.props.relPath, {staged: this.props.stagingStatus === 'staged'}), + multiFilePatch: repository.getFilePatchForPath(this.props.relPath, {staged}), isPartiallyStaged: repository.isPartiallyStaged(this.props.relPath), hasUndoHistory: 
repository.hasDiscardHistory(this.props.relPath), }); @@ -51,10 +53,8 @@ export default class FilePatchContainer extends React.Component { } return ( - ); diff --git a/lib/containers/commit-preview-container.js b/lib/containers/commit-preview-container.js new file mode 100644 index 00000000000..ef77f354fe5 --- /dev/null +++ b/lib/containers/commit-preview-container.js @@ -0,0 +1,41 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import yubikiri from 'yubikiri'; + +import ObserveModel from '../views/observe-model'; +import LoadingView from '../views/loading-view'; +import CommitPreviewController from '../controllers/commit-preview-controller'; + +export default class CommitPreviewContainer extends React.Component { + static propTypes = { + repository: PropTypes.object.isRequired, + } + + fetchData = repository => { + return yubikiri({ + multiFilePatch: repository.getStagedChangesPatch(), + }); + } + + render() { + return ( + + {this.renderResult} + + ); + } + + renderResult = data => { + if (this.props.repository.isLoading() || data === null) { + return ; + } + + return ( + + ); + } +} diff --git a/lib/controllers/changed-file-controller.js b/lib/controllers/changed-file-controller.js new file mode 100644 index 00000000000..a610208a9d0 --- /dev/null +++ b/lib/controllers/changed-file-controller.js @@ -0,0 +1,33 @@ +import React from 'react'; +import PropTypes from 'prop-types'; + +import MultiFilePatchController from './multi-file-patch-controller'; + +export default class ChangedFileController extends React.Component { + static propTypes = { + repository: PropTypes.object.isRequired, + stagingStatus: PropTypes.oneOf(['staged', 'unstaged']), + relPath: PropTypes.string.isRequired, + + workspace: PropTypes.object.isRequired, + commands: PropTypes.object.isRequired, + keymaps: PropTypes.object.isRequired, + tooltips: PropTypes.object.isRequired, + config: PropTypes.object.isRequired, + + destroy: PropTypes.func.isRequired, + undoLastDiscard: 
PropTypes.func.isRequired, + surfaceFileAtPath: PropTypes.func.isRequired, + } + + render() { + return ( + + ); + } + + surface = () => this.props.surfaceFileAtPath(this.props.relPath, this.props.stagingStatus) +} diff --git a/lib/controllers/commit-controller.js b/lib/controllers/commit-controller.js index db88bc8fe8d..b07be5d280a 100644 --- a/lib/controllers/commit-controller.js +++ b/lib/controllers/commit-controller.js @@ -8,7 +8,9 @@ import fs from 'fs-extra'; import CommitView from '../views/commit-view'; import RefHolder from '../models/ref-holder'; +import CommitPreviewItem from '../items/commit-preview-item'; import {AuthorPropType, UserStorePropType} from '../prop-types'; +import {watchWorkspaceItem} from '../watch-workspace-item'; import {autobind} from '../helpers'; import {addEvent} from '../reporter-proxy'; @@ -42,7 +44,8 @@ export default class CommitController extends React.Component { constructor(props, context) { super(props, context); - autobind(this, 'commit', 'handleMessageChange', 'toggleExpandedCommitMessageEditor', 'grammarAdded'); + autobind(this, 'commit', 'handleMessageChange', 'toggleExpandedCommitMessageEditor', 'grammarAdded', + 'toggleCommitPreview'); this.subscriptions = new CompositeDisposable(); this.refCommitView = new RefHolder(); @@ -51,6 +54,14 @@ export default class CommitController extends React.Component { this.subscriptions.add( this.commitMessageBuffer.onDidChange(this.handleMessageChange), ); + + this.previewWatcher = watchWorkspaceItem( + this.props.workspace, + CommitPreviewItem.buildURI(this.props.repository.getWorkingDirectoryPath()), + this, + 'commitPreviewActive', + ); + this.subscriptions.add(this.previewWatcher); } componentDidMount() { @@ -105,12 +116,21 @@ export default class CommitController extends React.Component { userStore={this.props.userStore} selectedCoAuthors={this.props.selectedCoAuthors} updateSelectedCoAuthors={this.props.updateSelectedCoAuthors} + toggleCommitPreview={this.toggleCommitPreview} + 
activateCommitPreview={this.activateCommitPreview} + commitPreviewActive={this.state.commitPreviewActive} /> ); } componentDidUpdate(prevProps) { this.commitMessageBuffer.setTextViaDiff(this.getCommitMessage()); + + if (prevProps.repository !== this.props.repository) { + this.previewWatcher.setPattern( + CommitPreviewItem.buildURI(this.props.repository.getWorkingDirectoryPath()), + ); + } } componentWillUnmount() { @@ -240,12 +260,31 @@ export default class CommitController extends React.Component { return this.refCommitView.map(view => view.setFocus(focus)).getOr(false); } - hasFocus() { - return this.refCommitView.map(view => view.hasFocus()).getOr(false); + advanceFocus(...args) { + return this.refCommitView.map(view => view.advanceFocus(...args)).getOr(false); + } + + retreatFocus(...args) { + return this.refCommitView.map(view => view.retreatFocus(...args)).getOr(false); + } + + hasFocusAtBeginning() { + return this.refCommitView.map(view => view.hasFocusAtBeginning()).getOr(false); + } + + toggleCommitPreview() { + addEvent('toggle-commit-preview', {package: 'github'}); + const uri = CommitPreviewItem.buildURI(this.props.repository.getWorkingDirectoryPath()); + if (this.props.workspace.hide(uri)) { + return Promise.resolve(); + } else { + return this.props.workspace.open(uri, {searchAllPanes: true, pending: true}); + } } - hasFocusEditor() { - return this.refCommitView.map(view => view.hasFocusEditor()).getOr(false); + activateCommitPreview = () => { + const uri = CommitPreviewItem.buildURI(this.props.repository.getWorkingDirectoryPath()); + return this.props.workspace.open(uri, {searchAllPanes: true, pending: true, activate: true}); } } diff --git a/lib/controllers/commit-preview-controller.js b/lib/controllers/commit-preview-controller.js new file mode 100644 index 00000000000..f1ce3c988c7 --- /dev/null +++ b/lib/controllers/commit-preview-controller.js @@ -0,0 +1,30 @@ +import React from 'react'; +import PropTypes from 'prop-types'; + +import 
MultiFilePatchController from './multi-file-patch-controller'; + +export default class CommitPreviewController extends React.Component { + static propTypes = { + repository: PropTypes.object.isRequired, + stagingStatus: PropTypes.oneOf(['staged', 'unstaged']), + + workspace: PropTypes.object.isRequired, + commands: PropTypes.object.isRequired, + keymaps: PropTypes.object.isRequired, + tooltips: PropTypes.object.isRequired, + config: PropTypes.object.isRequired, + + destroy: PropTypes.func.isRequired, + undoLastDiscard: PropTypes.func.isRequired, + surfaceToCommitPreviewButton: PropTypes.func.isRequired, + } + + render() { + return ( + + ); + } +} diff --git a/lib/controllers/git-tab-controller.js b/lib/controllers/git-tab-controller.js index 08c53355af6..57087c7170f 100644 --- a/lib/controllers/git-tab-controller.js +++ b/lib/controllers/git-tab-controller.js @@ -348,6 +348,10 @@ export default class GitTabController extends React.Component { return this.refView.map(view => view.focusAndSelectStagingItem(filePath, stagingStatus)).getOr(null); } + focusAndSelectCommitPreviewButton() { + return this.refView.map(view => view.focusAndSelectCommitPreviewButton()); + } + quietlySelectItem(filePath, stagingStatus) { return this.refView.map(view => view.quietlySelectItem(filePath, stagingStatus)).getOr(null); } diff --git a/lib/controllers/github-tab-controller.js b/lib/controllers/github-tab-controller.js index f8eeb45b51b..153b723d280 100644 --- a/lib/controllers/github-tab-controller.js +++ b/lib/controllers/github-tab-controller.js @@ -19,7 +19,7 @@ export default class GitHubTabController extends React.Component { allRemotes: RemoteSetPropType.isRequired, branches: BranchSetPropType.isRequired, selectedRemoteName: PropTypes.string, - aheadCount: PropTypes.number.isRequired, + aheadCount: PropTypes.number, pushInProgress: PropTypes.bool.isRequired, isLoading: PropTypes.bool.isRequired, } diff --git a/lib/controllers/file-patch-controller.js 
b/lib/controllers/multi-file-patch-controller.js similarity index 63% rename from lib/controllers/file-patch-controller.js rename to lib/controllers/multi-file-patch-controller.js index 551d026259f..09944505c58 100644 --- a/lib/controllers/file-patch-controller.js +++ b/lib/controllers/multi-file-patch-controller.js @@ -4,16 +4,16 @@ import path from 'path'; import {autobind, equalSets} from '../helpers'; import {addEvent} from '../reporter-proxy'; -import FilePatchItem from '../items/file-patch-item'; -import FilePatchView from '../views/file-patch-view'; +import {MultiFilePatchPropType} from '../prop-types'; +import ChangedFileItem from '../items/changed-file-item'; +import MultiFilePatchView from '../views/multi-file-patch-view'; -export default class FilePatchController extends React.Component { +export default class MultiFilePatchController extends React.Component { static propTypes = { repository: PropTypes.object.isRequired, stagingStatus: PropTypes.oneOf(['staged', 'unstaged']), - relPath: PropTypes.string.isRequired, - filePatch: PropTypes.object.isRequired, - hasUndoHistory: PropTypes.bool.isRequired, + multiFilePatch: MultiFilePatchPropType.isRequired, + hasUndoHistory: PropTypes.bool, workspace: PropTypes.object.isRequired, commands: PropTypes.object.isRequired, @@ -24,7 +24,7 @@ export default class FilePatchController extends React.Component { destroy: PropTypes.func.isRequired, discardLines: PropTypes.func.isRequired, undoLastDiscard: PropTypes.func.isRequired, - surfaceFileAtPath: PropTypes.func.isRequired, + surface: PropTypes.func.isRequired, } constructor(props) { @@ -32,13 +32,14 @@ export default class FilePatchController extends React.Component { autobind( this, 'selectedRowsChanged', - 'undoLastDiscard', 'diveIntoMirrorPatch', 'surfaceFile', 'openFile', + 'undoLastDiscard', 'diveIntoMirrorPatch', 'openFile', 'toggleFile', 'toggleRows', 'toggleModeChange', 'toggleSymlinkChange', 'discardRows', ); this.state = { selectionMode: 'hunk', 
selectedRows: new Set(), + hasMultipleFileSelections: false, }; this.mouseSelectionInProgress = false; @@ -53,7 +54,7 @@ export default class FilePatchController extends React.Component { componentDidUpdate(prevProps) { if ( this.lastPatchString !== null && - this.lastPatchString !== this.props.filePatch.toString() + this.lastPatchString !== this.props.multiFilePatch.toString() ) { this.resolvePatchChangePromise(); this.patchChangePromise = new Promise(resolve => { @@ -64,15 +65,15 @@ export default class FilePatchController extends React.Component { render() { return ( - 0) { editor.setCursorBufferPosition(positions[0], {autoscroll: false}); for (const position of positions.slice(1)) { @@ -122,17 +119,18 @@ export default class FilePatchController extends React.Component { return editor; } - toggleFile() { + toggleFile(filePatch) { return this.stagingOperation(() => { const methodName = this.withStagingStatus({staged: 'unstageFiles', unstaged: 'stageFiles'}); - return this.props.repository[methodName]([this.props.relPath]); + return this.props.repository[methodName]([filePatch.getPath()]); }); } async toggleRows(rowSet, nextSelectionMode) { let chosenRows = rowSet; if (chosenRows) { - await this.selectedRowsChanged(chosenRows, nextSelectionMode); + const nextMultipleFileSelections = this.props.multiFilePatch.spansMultipleFiles(chosenRows); + await this.selectedRowsChanged(chosenRows, nextSelectionMode, nextMultipleFileSelections); } else { chosenRows = this.state.selectedRows; } @@ -143,26 +141,27 @@ export default class FilePatchController extends React.Component { return this.stagingOperation(() => { const patch = this.withStagingStatus({ - staged: () => this.props.filePatch.getUnstagePatchForLines(chosenRows), - unstaged: () => this.props.filePatch.getStagePatchForLines(chosenRows), + staged: () => this.props.multiFilePatch.getUnstagePatchForLines(chosenRows), + unstaged: () => this.props.multiFilePatch.getStagePatchForLines(chosenRows), }); return 
this.props.repository.applyPatchToIndex(patch); }); } - toggleModeChange() { + toggleModeChange(filePatch) { return this.stagingOperation(() => { const targetMode = this.withStagingStatus({ - unstaged: this.props.filePatch.getNewMode(), - staged: this.props.filePatch.getOldMode(), + unstaged: filePatch.getNewMode(), + staged: filePatch.getOldMode(), }); - return this.props.repository.stageFileModeChange(this.props.relPath, targetMode); + return this.props.repository.stageFileModeChange(filePatch.getPath(), targetMode); }); } - toggleSymlinkChange() { + toggleSymlinkChange(filePatch) { return this.stagingOperation(() => { - const {filePatch, relPath, repository} = this.props; + const relPath = filePatch.getPath(); + const repository = this.props.repository; return this.withStagingStatus({ unstaged: () => { if (filePatch.hasTypechange() && filePatch.getStatus() === 'added') { @@ -183,30 +182,47 @@ export default class FilePatchController extends React.Component { } async discardRows(rowSet, nextSelectionMode, {eventSource} = {}) { + // (kuychaco) For now we only support discarding rows for MultiFilePatches that contain a single file patch + // The only way to access this method from the UI is to be in a ChangedFileItem, which only has a single file patch + // This check is duplicated in RootController#discardLines. 
We also want it here to prevent us from sending metrics + // unnecessarily + if (this.props.multiFilePatch.getFilePatches().length !== 1) { + return Promise.resolve(null); + } + let chosenRows = rowSet; if (chosenRows) { - await this.selectedRowsChanged(chosenRows, nextSelectionMode); + const nextMultipleFileSelections = this.props.multiFilePatch.spansMultipleFiles(chosenRows); + await this.selectedRowsChanged(chosenRows, nextSelectionMode, nextMultipleFileSelections); } else { chosenRows = this.state.selectedRows; } addEvent('discard-unstaged-changes', { package: 'github', - component: 'FilePatchController', + component: this.constructor.name, lineCount: chosenRows.size, eventSource, }); - return this.props.discardLines(this.props.filePatch, chosenRows, this.props.repository); + return this.props.discardLines(this.props.multiFilePatch, chosenRows, this.props.repository); } - selectedRowsChanged(rows, nextSelectionMode) { - if (equalSets(this.state.selectedRows, rows) && this.state.selectionMode === nextSelectionMode) { + selectedRowsChanged(rows, nextSelectionMode, nextMultipleFileSelections) { + if ( + equalSets(this.state.selectedRows, rows) && + this.state.selectionMode === nextSelectionMode && + this.state.hasMultipleFileSelections === nextMultipleFileSelections + ) { return Promise.resolve(); } return new Promise(resolve => { - this.setState({selectedRows: rows, selectionMode: nextSelectionMode}, resolve); + this.setState({ + selectedRows: rows, + selectionMode: nextSelectionMode, + hasMultipleFileSelections: nextMultipleFileSelections, + }, resolve); }); } @@ -225,7 +241,7 @@ export default class FilePatchController extends React.Component { } this.stagingOperationInProgress = true; - this.lastPatchString = this.props.filePatch.toString(); + this.lastPatchString = this.props.multiFilePatch.toString(); const operationPromise = fn(); operationPromise diff --git a/lib/controllers/root-controller.js b/lib/controllers/root-controller.js index 
3b4a6ad9f0f..ec5d22f536f 100644 --- a/lib/controllers/root-controller.js +++ b/lib/controllers/root-controller.js @@ -15,8 +15,9 @@ import InitDialog from '../views/init-dialog'; import CredentialDialog from '../views/credential-dialog'; import Commands, {Command} from '../atom/commands'; import GitTimingsView from '../views/git-timings-view'; -import FilePatchItem from '../items/file-patch-item'; +import ChangedFileItem from '../items/changed-file-item'; import IssueishDetailItem from '../items/issueish-detail-item'; +import CommitPreviewItem from '../items/commit-preview-item'; import GitTabItem from '../items/git-tab-item'; import GitHubTabItem from '../items/github-tab-item'; import StatusBarTileController from './status-bar-tile-controller'; @@ -66,7 +67,7 @@ export default class RootController extends React.Component { this, 'installReactDevTools', 'clearGithubToken', 'initializeRepo', 'showOpenIssueishDialog', 'showWaterfallDiagnostics', 'showCacheDiagnostics', 'acceptClone', 'cancelClone', 'acceptInit', 'cancelInit', - 'acceptOpenIssueish', 'cancelOpenIssueish', 'surfaceFromFileAtPath', 'destroyFilePatchPaneItems', + 'acceptOpenIssueish', 'cancelOpenIssueish', 'destroyFilePatchPaneItems', 'destroyEmptyFilePatchPaneItems', 'openCloneDialog', 'quietlySelectItem', 'viewUnstagedChangesForCurrentFile', 'viewStagedChangesForCurrentFile', 'openFiles', 'getUnsavedFiles', 'ensureNoUnsavedFiles', 'discardWorkDirChangesForPaths', 'discardLines', 'undoLastDiscard', 'refreshResolutionProgress', @@ -128,6 +129,7 @@ export default class RootController extends React.Component { return ( {devMode && } + @@ -316,9 +318,9 @@ export default class RootController extends React.Component { + uriPattern={ChangedFileItem.uriPattern}> {({itemHolder, params}) => ( - )} + + {({itemHolder, params}) => ( + + )} + {({itemHolder, params}) => ( { + const workdir = this.props.repository.getWorkingDirectoryPath(); + return this.props.workspace.toggle(CommitPreviewItem.buildURI(workdir)); + } 
+ showOpenIssueishDialog() { this.setState({openIssueishDialogActive: true}); } @@ -519,11 +548,16 @@ export default class RootController extends React.Component { this.setState({openIssueishDialogActive: false}); } - surfaceFromFileAtPath(filePath, stagingStatus) { + surfaceFromFileAtPath = (filePath, stagingStatus) => { const gitTab = this.gitTabTracker.getComponent(); return gitTab && gitTab.focusAndSelectStagingItem(filePath, stagingStatus); } + surfaceToCommitPreviewButton = () => { + const gitTab = this.gitTabTracker.getComponent(); + return gitTab && gitTab.focusAndSelectCommitPreviewButton(); + } + destroyFilePatchPaneItems() { destroyFilePatchPaneItems({onlyStaged: false}, this.props.workspace); } @@ -580,14 +614,14 @@ export default class RootController extends React.Component { pane.splitDown(); } const lineNum = editor.getCursorBufferPosition().row + 1; - const filePatchItem = await this.props.workspace.open( - FilePatchItem.buildURI(filePath, repoPath, stagingStatus), + const item = await this.props.workspace.open( + ChangedFileItem.buildURI(filePath, repoPath, stagingStatus), {pending: true, activatePane: true, activateItem: true}, ); - await filePatchItem.getRealItemPromise(); - await filePatchItem.getFilePatchLoadedPromise(); - filePatchItem.goToDiffLine(lineNum); - filePatchItem.focus(); + await item.getRealItemPromise(); + await item.getFilePatchLoadedPromise(); + item.goToDiffLine(lineNum); + item.focus(); } else { throw new Error(`${absFilePath} does not belong to repo ${repoPath}`); } @@ -646,10 +680,16 @@ export default class RootController extends React.Component { ); } - async discardLines(filePatch, lines, repository = this.props.repository) { - const filePath = filePatch.getPath(); + async discardLines(multiFilePatch, lines, repository = this.props.repository) { + // (kuychaco) For now we only support discarding rows for MultiFilePatches that contain a single file patch + // The only way to access this method from the UI is to be in a 
ChangedFileItem, which only has a single file patch + if (multiFilePatch.getFilePatches().length !== 1) { + return Promise.resolve(null); + } + + const filePath = multiFilePatch.getFilePatches()[0].getPath(); const destructiveAction = async () => { - const discardFilePatch = filePatch.getUnstagePatchForLines(lines); + const discardFilePatch = multiFilePatch.getUnstagePatchForLines(lines); await repository.applyPatchToWorkdir(discardFilePatch); }; return await repository.storeBeforeAndAfterBlobs( diff --git a/lib/git-shell-out-strategy.js b/lib/git-shell-out-strategy.js index 7c8ceb51875..25d0122a321 100644 --- a/lib/git-shell-out-strategy.js +++ b/lib/git-shell-out-strategy.js @@ -20,6 +20,7 @@ import { normalizeGitHelperPath, toNativePathSep, toGitPathSep, LINE_ENDING_REGEX, CO_AUTHOR_REGEX, } from './helpers'; import GitTimingsView from './views/git-timings-view'; +import File from './models/patch/file'; import WorkerManager from './worker-manager'; const MAX_STATUS_OUTPUT_LENGTH = 1024 * 1024 * 10; @@ -640,12 +641,12 @@ export default class GitShellOutStrategy { let mode; let realpath; if (executable) { - mode = '100755'; + mode = File.modes.EXECUTABLE; } else if (symlink) { - mode = '120000'; + mode = File.modes.SYMLINK; realpath = await fs.realpath(absPath); } else { - mode = '100644'; + mode = File.modes.NORMAL; } rawDiffs.push(buildAddedFilePatch(filePath, binary ? 
null : contents, mode, realpath)); @@ -656,6 +657,23 @@ export default class GitShellOutStrategy { return rawDiffs; } + async getStagedChangesPatch() { + const output = await this.exec([ + 'diff', '--staged', '--no-prefix', '--no-ext-diff', '--no-renames', '--diff-filter=u', + ]); + + if (!output) { + return []; + } + + const diffs = parseDiff(output); + for (const diff of diffs) { + if (diff.oldPath) { diff.oldPath = toNativePathSep(diff.oldPath); } + if (diff.newPath) { diff.newPath = toNativePathSep(diff.newPath); } + } + return diffs; + } + /** * Miscellaneous getters */ @@ -1044,11 +1062,11 @@ export default class GitShellOutStrategy { const executable = await isFileExecutable(path.join(this.workingDir, filePath)); const symlink = await isFileSymlink(path.join(this.workingDir, filePath)); if (executable) { - return '100755'; + return File.modes.EXECUTABLE; } else if (symlink) { - return '120000'; + return File.modes.SYMLINK; } else { - return '100644'; + return File.modes.NORMAL; } } } @@ -1061,11 +1079,13 @@ export default class GitShellOutStrategy { function buildAddedFilePatch(filePath, contents, mode, realpath) { const hunks = []; if (contents) { - const noNewLine = contents[contents.length - 1] !== '\n'; + let noNewLine; let lines; - if (mode === '120000') { + if (mode === File.modes.SYMLINK) { + noNewLine = false; lines = [`+${toGitPathSep(realpath)}`, '\\ No newline at end of file']; } else { + noNewLine = contents[contents.length - 1] !== '\n'; lines = contents.trim().split(LINE_ENDING_REGEX).map(line => `+${line}`); } if (noNewLine) { lines.push('\\ No newline at end of file'); } diff --git a/lib/github-package.js b/lib/github-package.js index b5b65be47ef..0da56cd648a 100644 --- a/lib/github-package.js +++ b/lib/github-package.js @@ -379,6 +379,16 @@ export default class GithubPackage { return item; } + createCommitPreviewStub({uri}) { + const item = StubItem.create('git-commit-preview', { + title: 'Commit preview', + }, uri); + if (this.controller) { 
+ this.rerender(); + } + return item; + } + destroyGitTabItem() { if (this.gitTabStubItem) { this.gitTabStubItem.destroy(); diff --git a/lib/helpers.js b/lib/helpers.js index def4f8d70cc..74a77f9460c 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -3,7 +3,7 @@ import fs from 'fs-extra'; import os from 'os'; import temp from 'temp'; -import FilePatchController from './controllers/file-patch-controller'; +import MultiFilePatchController from './controllers/multi-file-patch-controller'; import RefHolder from './models/ref-holder'; export const LINE_ENDING_REGEX = /\r?\n/; @@ -377,7 +377,7 @@ export function getCommitMessageEditors(repository, workspace) { export function getFilePatchPaneItems({onlyStaged, empty} = {}, workspace) { return workspace.getPaneItems().filter(item => { - const isFilePatchItem = item && item.getRealItem && item.getRealItem() instanceof FilePatchController; + const isFilePatchItem = item && item.getRealItem && item.getRealItem() instanceof MultiFilePatchController; if (onlyStaged) { return isFilePatchItem && item.stagingStatus === 'staged'; } else if (empty) { diff --git a/lib/items/file-patch-item.js b/lib/items/changed-file-item.js similarity index 89% rename from lib/items/file-patch-item.js rename to lib/items/changed-file-item.js index b6e56347dca..bff241401ce 100644 --- a/lib/items/file-patch-item.js +++ b/lib/items/changed-file-item.js @@ -4,9 +4,9 @@ import {Emitter} from 'event-kit'; import {WorkdirContextPoolPropType} from '../prop-types'; import {autobind} from '../helpers'; -import FilePatchContainer from '../containers/file-patch-container'; +import ChangedFileContainer from '../containers/changed-file-container'; -export default class FilePatchItem extends React.Component { +export default class ChangedFileItem extends React.Component { static propTypes = { workdirContextPool: WorkdirContextPoolPropType.isRequired, @@ -77,7 +77,8 @@ export default class FilePatchItem extends React.Component { const repository = 
this.props.workdirContextPool.getContext(this.props.workingDirectory).getRepository(); return ( - { + /* istanbul ignore else */ + if (!this.isDestroyed) { + this.emitter.emit('did-destroy'); + this.isDestroyed = true; + } + } + + onDidDestroy(callback) { + return this.emitter.on('did-destroy', callback); + } + + render() { + const repository = this.props.workdirContextPool.getContext(this.props.workingDirectory).getRepository(); + + return ( + + ); + } + + getTitle() { + return 'Commit preview'; + } + + getIconName() { + return 'git-commit'; + } + + getWorkingDirectory() { + return this.props.workingDirectory; + } + + serialize() { + return { + deserializer: 'CommitPreviewStub', + uri: CommitPreviewItem.buildURI(this.props.workingDirectory), + }; + } + + focus() { + this.refInitialFocus.map(focusable => focusable.focus()); + } +} diff --git a/lib/items/git-tab-item.js b/lib/items/git-tab-item.js index 75ea9601761..254c9d717aa 100644 --- a/lib/items/git-tab-item.js +++ b/lib/items/git-tab-item.js @@ -83,6 +83,10 @@ export default class GitTabItem extends React.Component { return this.refController.map(c => c.focusAndSelectStagingItem(...args)); } + focusAndSelectCommitPreviewButton() { + return this.refController.map(c => c.focusAndSelectCommitPreviewButton()); + } + quietlySelectItem(...args) { return this.refController.map(c => c.quietlySelectItem(...args)); } diff --git a/lib/models/patch/builder.js b/lib/models/patch/builder.js index a773e3d9768..e8e18982a7b 100644 --- a/lib/models/patch/builder.js +++ b/lib/models/patch/builder.js @@ -5,27 +5,82 @@ import File, {nullFile} from './file'; import Patch from './patch'; import {Unchanged, Addition, Deletion, NoNewline} from './region'; import FilePatch from './file-patch'; +import MultiFilePatch from './multi-file-patch'; -export default function buildFilePatch(diffs) { +export function buildFilePatch(diffs) { + const layeredBuffer = initializeBuffer(); + + let filePatch; if (diffs.length === 0) { - return 
emptyDiffFilePatch(); + filePatch = emptyDiffFilePatch(); } else if (diffs.length === 1) { - return singleDiffFilePatch(diffs[0]); + filePatch = singleDiffFilePatch(diffs[0], layeredBuffer); } else if (diffs.length === 2) { - return dualDiffFilePatch(...diffs); + filePatch = dualDiffFilePatch(diffs[0], diffs[1], layeredBuffer); } else { throw new Error(`Unexpected number of diffs: ${diffs.length}`); } + + return new MultiFilePatch({filePatches: [filePatch], ...layeredBuffer}); +} + +export function buildMultiFilePatch(diffs) { + const layeredBuffer = initializeBuffer(); + const byPath = new Map(); + const actions = []; + + let index = 0; + for (const diff of diffs) { + const thePath = diff.oldPath || diff.newPath; + + if (diff.status === 'added' || diff.status === 'deleted') { + // Potential paired diff. Either a symlink deletion + content addition or a symlink addition + + // content deletion. + const otherHalf = byPath.get(thePath); + if (otherHalf) { + // The second half. Complete the paired diff, or fail if they have unexpected statuses or modes. + const [otherDiff, otherIndex] = otherHalf; + actions[otherIndex] = () => dualDiffFilePatch(diff, otherDiff, layeredBuffer); + byPath.delete(thePath); + } else { + // The first half we've seen. + byPath.set(thePath, [diff, index]); + index++; + } + } else { + actions[index] = () => singleDiffFilePatch(diff, layeredBuffer); + index++; + } + } + + // Populate unpaired diffs that looked like they could be part of a pair, but weren't. + for (const [unpairedDiff, originalIndex] of byPath.values()) { + actions[originalIndex] = () => singleDiffFilePatch(unpairedDiff, layeredBuffer); + } + + const filePatches = actions.map(action => action()); + + // Fix markers for patches with no hunks. + // Head position was moved every time lines were appended. 
+ filePatches.forEach(filePatch => { + if (filePatch.getHunks().length === 0) { + const marker = filePatch.getMarker(); + marker.setHeadPosition(marker.getTailPosition()); + } + }); + + return new MultiFilePatch({filePatches, ...layeredBuffer}); } function emptyDiffFilePatch() { return FilePatch.createNull(); } -function singleDiffFilePatch(diff) { - const wasSymlink = diff.oldMode === '120000'; - const isSymlink = diff.newMode === '120000'; - const [hunks, buffer, layers] = buildHunks(diff); +function singleDiffFilePatch(diff, layeredBuffer) { + const wasSymlink = diff.oldMode === File.modes.SYMLINK; + const isSymlink = diff.newMode === File.modes.SYMLINK; + + const [hunks, patchMarker] = buildHunks(diff, layeredBuffer); let oldSymlink = null; let newSymlink = null; @@ -44,14 +99,14 @@ function singleDiffFilePatch(diff) { const newFile = diff.newPath !== null || diff.newMode !== null ? new File({path: diff.newPath, mode: diff.newMode, symlink: newSymlink}) : nullFile; - const patch = new Patch({status: diff.status, hunks, buffer, layers}); + const patch = new Patch({status: diff.status, hunks, marker: patchMarker, buffer: layeredBuffer.buffer}); return new FilePatch(oldFile, newFile, patch); } -function dualDiffFilePatch(diff1, diff2) { +function dualDiffFilePatch(diff1, diff2, layeredBuffer) { let modeChangeDiff, contentChangeDiff; - if (diff1.oldMode === '120000' || diff1.newMode === '120000') { + if (diff1.oldMode === File.modes.SYMLINK || diff1.newMode === File.modes.SYMLINK) { modeChangeDiff = diff1; contentChangeDiff = diff2; } else { @@ -59,7 +114,7 @@ function dualDiffFilePatch(diff1, diff2) { contentChangeDiff = diff1; } - const [hunks, buffer, layers] = buildHunks(contentChangeDiff); + const [hunks, patchMarker] = buildHunks(contentChangeDiff, layeredBuffer); const filePath = contentChangeDiff.oldPath || contentChangeDiff.newPath; const symlink = modeChangeDiff.hunks[0].lines[0].slice(1); @@ -85,7 +140,7 @@ function dualDiffFilePatch(diff1, diff2) { 
const oldFile = new File({path: filePath, mode: oldMode, symlink: oldSymlink}); const newFile = new File({path: filePath, mode: newMode, symlink: newSymlink}); - const patch = new Patch({status, hunks, buffer, layers}); + const patch = new Patch({status, hunks, marker: patchMarker, buffer: layeredBuffer.buffer}); return new FilePatch(oldFile, newFile, patch); } @@ -97,12 +152,19 @@ const CHANGEKIND = { '\\': NoNewline, }; -function buildHunks(diff) { +function initializeBuffer() { const buffer = new TextBuffer(); - const layers = ['hunk', 'unchanged', 'addition', 'deletion', 'noNewline'].reduce((obj, key) => { + buffer.retain(); + + const layers = ['patch', 'hunk', 'unchanged', 'addition', 'deletion', 'noNewline'].reduce((obj, key) => { obj[key] = buffer.addMarkerLayer(); return obj; }, {}); + + return {buffer, layers}; +} + +function buildHunks(diff, {buffer, layers}) { const layersByKind = new Map([ [Unchanged, layers.unchanged], [Addition, layers.addition], @@ -111,7 +173,18 @@ function buildHunks(diff) { ]); const hunks = []; - let bufferRow = 0; + const patchStartRow = buffer.getLastRow(); + let bufferRow = patchStartRow; + let nextLineLength = 0; + + if (diff.hunks.length === 0) { + const patchMarker = layers.patch.markPosition( + [patchStartRow, 0], + {invalidate: 'never', exclusive: false}, + ); + + return [hunks, patchMarker]; + } for (const hunkData of diff.hunks) { const bufferStartRow = bufferRow; @@ -121,7 +194,6 @@ function buildHunks(diff) { let LastChangeKind = null; let currentRangeStart = bufferRow; let lastLineLength = 0; - let nextLineLength = 0; const finishCurrentRange = () => { if (currentRangeStart === bufferRow) { @@ -173,5 +245,10 @@ function buildHunks(diff) { })); } - return [hunks, buffer, layers]; + const patchMarker = layers.patch.markRange( + [[patchStartRow, 0], [bufferRow - 1, nextLineLength]], + {invalidate: 'never', exclusive: false}, + ); + + return [hunks, patchMarker]; } diff --git a/lib/models/patch/file-patch.js 
b/lib/models/patch/file-patch.js index e43719b0f0a..c40bd48dd3e 100644 --- a/lib/models/patch/file-patch.js +++ b/lib/models/patch/file-patch.js @@ -29,6 +29,14 @@ export default class FilePatch { return this.patch; } + getMarker() { + return this.getPatch().getMarker(); + } + + getStartRange() { + return this.getPatch().getStartRange(); + } + getOldPath() { return this.getOldFile().getPath(); } @@ -53,76 +61,16 @@ export default class FilePatch { return this.getNewFile().getSymlink(); } - getByteSize() { - return this.getPatch().getByteSize(); - } - - getHunkAt(bufferRow) { - return this.getPatch().getHunkAt(bufferRow); - } - - getBuffer() { - return this.getPatch().getBuffer(); + getFirstChangeRange() { + return this.getPatch().getFirstChangeRange(); } getMaxLineNumberWidth() { return this.getPatch().getMaxLineNumberWidth(); } - getHunkLayer() { - return this.getPatch().getHunkLayer(); - } - - getUnchangedLayer() { - return this.getPatch().getUnchangedLayer(); - } - - getAdditionLayer() { - return this.getPatch().getAdditionLayer(); - } - - getDeletionLayer() { - return this.getPatch().getDeletionLayer(); - } - - getNoNewlineLayer() { - return this.getPatch().getNoNewlineLayer(); - } - - // TODO delete if unused - getAdditionRanges() { - return this.getHunks().reduce((acc, hunk) => { - acc.push(...hunk.getAdditionRanges()); - return acc; - }, []); - } - - // TODO delete if unused - getDeletionRanges() { - return this.getHunks().reduce((acc, hunk) => { - acc.push(...hunk.getDeletionRanges()); - return acc; - }, []); - } - - // TODO delete if unused - getNoNewlineRanges() { - const hunks = this.getHunks(); - const lastHunk = hunks[hunks.length - 1]; - if (!lastHunk) { - return []; - } - - const range = lastHunk.getNoNewlineRange(); - if (!range) { - return []; - } - - return [range]; - } - - adoptBufferFrom(prevFilePatch) { - this.getPatch().adoptBufferFrom(prevFilePatch.getPatch()); + containsRow(row) { + return this.getPatch().containsRow(row); } 
didChangeExecutableMode() { @@ -167,69 +115,64 @@ export default class FilePatch { ); } - getStagePatchForLines(selectedLineSet) { - if (this.patch.getChangedLineCount() === selectedLineSet.size) { - if (this.hasTypechange() && this.getStatus() === 'deleted') { - // handle special case when symlink is created where a file was deleted. In order to stage the file deletion, - // we must ensure that the created file patch has no new file - return this.clone({newFile: nullFile}); + buildStagePatchForLines(originalBuffer, nextLayeredBuffer, selectedLineSet) { + let newFile = this.getNewFile(); + if (this.getStatus() === 'deleted') { + if ( + this.patch.getChangedLineCount() === selectedLineSet.size && + Array.from(selectedLineSet, row => this.patch.containsRow(row)).every(Boolean) + ) { + // Whole file deletion staged. + newFile = nullFile; } else { - return this; - } - } else { - const patch = this.patch.getStagePatchForLines(selectedLineSet); - if (this.getStatus() === 'deleted') { - // Populate newFile - return this.clone({newFile: this.getOldFile(), patch}); - } else { - return this.clone({patch}); + // Partial file deletion, which becomes a modification. + newFile = this.getOldFile(); } } - } - getStagePatchForHunk(selectedHunk) { - return this.getStagePatchForLines(new Set(selectedHunk.getBufferRows())); + const patch = this.patch.buildStagePatchForLines( + originalBuffer, + nextLayeredBuffer, + selectedLineSet, + ); + return this.clone({newFile, patch}); } - getUnstagePatchForLines(selectedLineSet) { - const wholeFile = this.patch.getChangedLineCount() === selectedLineSet.size; + buildUnstagePatchForLines(originalBuffer, nextLayeredBuffer, selectedLineSet) { const nonNullFile = this.getNewFile().isPresent() ? 
this.getNewFile() : this.getOldFile(); let oldFile = this.getNewFile(); let newFile = nonNullFile; - if (wholeFile && this.getStatus() === 'added') { - // Ensure that newFile is null if the patch is an addition because we're deleting the entire file from the - // index. If a symlink was deleted and replaced by a non-symlink file, we don't want the symlink entry to muck - // up the patch. - oldFile = nonNullFile; - newFile = nullFile; - } else if (wholeFile && this.getStatus() === 'deleted') { - oldFile = nullFile; - newFile = nonNullFile; + if (this.getStatus() === 'added') { + if ( + selectedLineSet.size === this.patch.getChangedLineCount() && + Array.from(selectedLineSet, row => this.patch.containsRow(row)).every(Boolean) + ) { + // Ensure that newFile is null if the patch is an addition because we're deleting the entire file from the + // index. If a symlink was deleted and replaced by a non-symlink file, we don't want the symlink entry to muck + // up the patch. + oldFile = nonNullFile; + newFile = nullFile; + } + } else if (this.getStatus() === 'deleted') { + if ( + selectedLineSet.size === this.patch.getChangedLineCount() && + Array.from(selectedLineSet, row => this.patch.containsRow(row)).every(Boolean) + ) { + oldFile = nullFile; + newFile = nonNullFile; + } } - return this.clone({oldFile, newFile, patch: this.patch.getUnstagePatchForLines(selectedLineSet)}); - } - - getUnstagePatchForHunk(hunk) { - return this.getUnstagePatchForLines(new Set(hunk.getBufferRows())); - } - - getNextSelectionRange(lastFilePatch, lastSelectedRows) { - return this.getPatch().getNextSelectionRange(lastFilePatch.getPatch(), lastSelectedRows); - } - - isEqual(other) { - if (!(other instanceof this.constructor)) { return false; } - - return this === other || ( - this.oldFile.isEqual(other.oldFile) && - this.newFile.isEqual(other.newFile) && - this.patch.isEqual(other.patch) + const patch = this.patch.buildUnstagePatchForLines( + originalBuffer, + nextLayeredBuffer, + 
selectedLineSet, ); + return this.clone({oldFile, newFile, patch}); } - toString() { + toStringIn(buffer) { if (!this.isPresent()) { return ''; } @@ -245,7 +188,7 @@ export default class FilePatch { patch: this.getNewSymlink() ? this.getPatch().clone({status: 'added'}) : this.getPatch(), }); - return left.toString() + right.toString(); + return left.toStringIn(buffer) + right.toStringIn(buffer); } else if (this.getStatus() === 'added' && this.getNewFile().isSymlink()) { const symlinkPath = this.getNewSymlink(); return this.getHeaderString() + `@@ -0,0 +1 @@\n+${symlinkPath}\n\\ No newline at end of file\n`; @@ -253,7 +196,7 @@ export default class FilePatch { const symlinkPath = this.getOldSymlink(); return this.getHeaderString() + `@@ -1 +0,0 @@\n-${symlinkPath}\n\\ No newline at end of file\n`; } else { - return this.getHeaderString() + this.getPatch().toString(); + return this.getHeaderString() + this.getPatch().toStringIn(buffer); } } diff --git a/lib/models/patch/file.js b/lib/models/patch/file.js index b652e1884c6..0c893ca4f10 100644 --- a/lib/models/patch/file.js +++ b/lib/models/patch/file.js @@ -1,4 +1,18 @@ export default class File { + static modes = { + // Non-executable, non-symlink + NORMAL: '100644', + + // +x bit set + EXECUTABLE: '100755', + + // Soft link to another filesystem location + SYMLINK: '120000', + + // Submodule mount point + GITLINK: '160000', + } + constructor({path, mode, symlink}) { this.path = path; this.mode = mode; @@ -18,31 +32,21 @@ export default class File { } isSymlink() { - return this.getMode() === '120000'; + return this.getMode() === this.constructor.modes.SYMLINK; } isRegularFile() { - return this.getMode() === '100644' || this.getMode() === '100755'; + return this.getMode() === this.constructor.modes.NORMAL || this.getMode() === this.constructor.modes.EXECUTABLE; } isExecutable() { - return this.getMode() === '100755'; + return this.getMode() === this.constructor.modes.EXECUTABLE; } isPresent() { return true; } - 
isEqual(other) { - if (!other.isPresent()) { return false; } - - return other === this || ( - this.path === other.path && - this.mode === other.mode && - this.symlink === other.symlink - ); - } - clone(opts = {}) { return new File({ path: opts.path !== undefined ? opts.path : this.path, @@ -84,10 +88,6 @@ export const nullFile = { return false; }, - isEqual(other) { - return other === this; - }, - clone(opts = {}) { if (opts.path === undefined && opts.mode === undefined && opts.symlink === undefined) { return this; diff --git a/lib/models/patch/hunk.js b/lib/models/patch/hunk.js index c299a04c179..84cab390028 100644 --- a/lib/models/patch/hunk.js +++ b/lib/models/patch/hunk.js @@ -50,23 +50,6 @@ export default class Hunk { return this.regions.filter(change => change.isChange()); } - getAdditionRanges() { - return this.regions.filter(change => change.isAddition()).map(change => change.getRange()); - } - - getDeletionRanges() { - return this.regions.filter(change => change.isDeletion()).map(change => change.getRange()); - } - - getNoNewlineRange() { - const lastRegion = this.regions[this.regions.length - 1]; - if (lastRegion && lastRegion.isNoNewline()) { - return lastRegion.getRange(); - } else { - return null; - } - } - getMarker() { return this.marker; } @@ -156,21 +139,6 @@ export default class Hunk { this.marker = markable.markRange(this.getRange(), {invalidate: 'never', exclusive: false}); } - isEqual(other) { - if (this === other) { return true; } - - if (this.oldStartRow !== other.oldStartRow) { return false; } - if (this.oldRowCount !== other.oldRowCount) { return false; } - if (this.newStartRow !== other.newStartRow) { return false; } - if (this.newRowCount !== other.newRowCount) { return false; } - if (this.sectionHeading !== other.sectionHeading) { return false; } - - if (this.regions.length !== other.regions.length) { return false; } - if (this.regions.some((region, i) => !region.isEqual(other.regions[i]))) { return false; } - - return true; - } - 
toStringIn(buffer) { return this.getRegions().reduce((str, region) => str + region.toStringIn(buffer), this.getHeader() + '\n'); } diff --git a/lib/models/patch/index.js b/lib/models/patch/index.js index 596fcfde501..525043dbc41 100644 --- a/lib/models/patch/index.js +++ b/lib/models/patch/index.js @@ -1 +1 @@ -export {default as buildFilePatch} from './builder'; +export {buildFilePatch, buildMultiFilePatch} from './builder'; diff --git a/lib/models/patch/multi-file-patch.js b/lib/models/patch/multi-file-patch.js new file mode 100644 index 00000000000..c329d66ff47 --- /dev/null +++ b/lib/models/patch/multi-file-patch.js @@ -0,0 +1,334 @@ +import {TextBuffer, Range} from 'atom'; + +export default class MultiFilePatch { + constructor({buffer, layers, filePatches}) { + this.buffer = buffer || null; + + this.patchLayer = layers && layers.patch; + this.hunkLayer = layers && layers.hunk; + this.unchangedLayer = layers && layers.unchanged; + this.additionLayer = layers && layers.addition; + this.deletionLayer = layers && layers.deletion; + this.noNewlineLayer = layers && layers.noNewline; + + this.filePatches = filePatches || []; + + this.filePatchesByMarker = new Map(); + this.hunksByMarker = new Map(); + + for (const filePatch of this.filePatches) { + this.filePatchesByMarker.set(filePatch.getMarker(), filePatch); + for (const hunk of filePatch.getHunks()) { + this.hunksByMarker.set(hunk.getMarker(), hunk); + } + } + } + + clone(opts = {}) { + return new this.constructor({ + buffer: opts.buffer !== undefined ? opts.buffer : this.getBuffer(), + layers: opts.layers !== undefined ? opts.layers : { + patch: this.getPatchLayer(), + hunk: this.getHunkLayer(), + unchanged: this.getUnchangedLayer(), + addition: this.getAdditionLayer(), + deletion: this.getDeletionLayer(), + noNewline: this.getNoNewlineLayer(), + }, + filePatches: opts.filePatches !== undefined ? 
opts.filePatches : this.getFilePatches(), + }); + } + + getBuffer() { + return this.buffer; + } + + getPatchLayer() { + return this.patchLayer; + } + + getHunkLayer() { + return this.hunkLayer; + } + + getUnchangedLayer() { + return this.unchangedLayer; + } + + getAdditionLayer() { + return this.additionLayer; + } + + getDeletionLayer() { + return this.deletionLayer; + } + + getNoNewlineLayer() { + return this.noNewlineLayer; + } + + getFilePatches() { + return this.filePatches; + } + + getPathSet() { + return this.getFilePatches().reduce((pathSet, filePatch) => { + for (const file of [filePatch.getOldFile(), filePatch.getNewFile()]) { + if (file.isPresent()) { + pathSet.add(file.getPath()); + } + } + return pathSet; + }, new Set()); + } + + getFilePatchAt(bufferRow) { + if (bufferRow < 0) { + return undefined; + } + const [marker] = this.patchLayer.findMarkers({intersectsRow: bufferRow}); + return this.filePatchesByMarker.get(marker); + } + + getHunkAt(bufferRow) { + if (bufferRow < 0) { + return undefined; + } + const [marker] = this.hunkLayer.findMarkers({intersectsRow: bufferRow}); + return this.hunksByMarker.get(marker); + } + + getStagePatchForLines(selectedLineSet) { + const nextLayeredBuffer = this.buildLayeredBuffer(); + const nextFilePatches = this.getFilePatchesContaining(selectedLineSet).map(fp => { + return fp.buildStagePatchForLines(this.getBuffer(), nextLayeredBuffer, selectedLineSet); + }); + return this.clone({...nextLayeredBuffer, filePatches: nextFilePatches}); + } + + getStagePatchForHunk(hunk) { + return this.getStagePatchForLines(new Set(hunk.getBufferRows())); + } + + getUnstagePatchForLines(selectedLineSet) { + const nextLayeredBuffer = this.buildLayeredBuffer(); + const nextFilePatches = this.getFilePatchesContaining(selectedLineSet).map(fp => { + return fp.buildUnstagePatchForLines(this.getBuffer(), nextLayeredBuffer, selectedLineSet); + }); + return this.clone({...nextLayeredBuffer, filePatches: nextFilePatches}); + } + + 
getUnstagePatchForHunk(hunk) { + return this.getUnstagePatchForLines(new Set(hunk.getBufferRows())); + } + + getNextSelectionRange(lastMultiFilePatch, lastSelectedRows) { + if (lastSelectedRows.size === 0) { + const [firstPatch] = this.getFilePatches(); + if (!firstPatch) { + return Range.fromObject([[0, 0], [0, 0]]); + } + + return firstPatch.getFirstChangeRange(); + } + + const lastMax = Math.max(...lastSelectedRows); + + let lastSelectionIndex = 0; + // counts unselected lines in changed regions from the old patch + // until we get to the bottom-most selected line from the old patch (lastMax). + patchLoop: for (const lastFilePatch of lastMultiFilePatch.getFilePatches()) { + for (const hunk of lastFilePatch.getHunks()) { + let includesMax = false; + + for (const change of hunk.getChanges()) { + for (const {intersection, gap} of change.intersectRows(lastSelectedRows, true)) { + // Only include a partial range if this intersection includes the last selected buffer row. + includesMax = intersection.intersectsRow(lastMax); + const delta = includesMax ? lastMax - intersection.start.row + 1 : intersection.getRowCount(); + + if (gap) { + // Range of unselected changes. + lastSelectionIndex += delta; + } + + if (includesMax) { + break patchLoop; + } + } + } + } + } + + // Iterate over changed lines in new patch in order to find the + // new row to be selected based on the last selection index. 
+ // As we walk through the changed lines, we whittle down the + // remaining lines until we reach the row that corresponds to the + // last selected index + + let newSelectionRow = 0; + let remainingChangedLines = lastSelectionIndex; + + let foundRow = false; + let lastChangedRow; + + patchLoop: for (const filePatch of this.getFilePatches()) { + for (const hunk of filePatch.getHunks()) { + for (const change of hunk.getChanges()) { + if (remainingChangedLines < change.bufferRowCount()) { + newSelectionRow = change.getStartBufferRow() + remainingChangedLines; + foundRow = true; + break patchLoop; + } else { + remainingChangedLines -= change.bufferRowCount(); + lastChangedRow = change.getEndBufferRow(); + } + } + } + } + + // If we never got to the last selected index, that means it is + // no longer present in the new patch (ie. we staged the last line of the file). + // In this case we want the next selected line to be the last changed row in the file + if (!foundRow) { + newSelectionRow = lastChangedRow; + } + + return Range.fromObject([[newSelectionRow, 0], [newSelectionRow, Infinity]]); + } + + adoptBufferFrom(lastMultiFilePatch) { + lastMultiFilePatch.getPatchLayer().clear(); + lastMultiFilePatch.getHunkLayer().clear(); + lastMultiFilePatch.getUnchangedLayer().clear(); + lastMultiFilePatch.getAdditionLayer().clear(); + lastMultiFilePatch.getDeletionLayer().clear(); + lastMultiFilePatch.getNoNewlineLayer().clear(); + + this.filePatchesByMarker.clear(); + this.hunksByMarker.clear(); + + const nextBuffer = lastMultiFilePatch.getBuffer(); + nextBuffer.setText(this.getBuffer().getText()); + + for (const filePatch of this.getFilePatches()) { + filePatch.getPatch().reMarkOn(lastMultiFilePatch.getPatchLayer()); + this.filePatchesByMarker.set(filePatch.getMarker(), filePatch); + + for (const hunk of filePatch.getHunks()) { + hunk.reMarkOn(lastMultiFilePatch.getHunkLayer()); + this.hunksByMarker.set(hunk.getMarker(), hunk); + + for (const region of hunk.getRegions()) { + 
const target = region.when({ + unchanged: () => lastMultiFilePatch.getUnchangedLayer(), + addition: () => lastMultiFilePatch.getAdditionLayer(), + deletion: () => lastMultiFilePatch.getDeletionLayer(), + nonewline: () => lastMultiFilePatch.getNoNewlineLayer(), + }); + region.reMarkOn(target); + } + } + } + + this.patchLayer = lastMultiFilePatch.getPatchLayer(); + this.hunkLayer = lastMultiFilePatch.getHunkLayer(); + this.unchangedLayer = lastMultiFilePatch.getUnchangedLayer(); + this.additionLayer = lastMultiFilePatch.getAdditionLayer(); + this.deletionLayer = lastMultiFilePatch.getDeletionLayer(); + this.noNewlineLayer = lastMultiFilePatch.getNoNewlineLayer(); + + this.buffer = nextBuffer; + } + + buildLayeredBuffer() { + const buffer = new TextBuffer(); + buffer.retain(); + + return { + buffer, + layers: { + patch: buffer.addMarkerLayer(), + hunk: buffer.addMarkerLayer(), + unchanged: buffer.addMarkerLayer(), + addition: buffer.addMarkerLayer(), + deletion: buffer.addMarkerLayer(), + noNewline: buffer.addMarkerLayer(), + }, + }; + } + + /* + * Efficiently locate the FilePatch instances that contain at least one row from a Set. + */ + getFilePatchesContaining(rowSet) { + const sortedRowSet = Array.from(rowSet); + sortedRowSet.sort((a, b) => a - b); + + const filePatches = []; + let lastFilePatch = null; + for (const row of sortedRowSet) { + // Because the rows are sorted, consecutive rows will almost certainly belong to the same patch, so we can save + // many avoidable marker index lookups by comparing with the last. 
+ if (lastFilePatch && lastFilePatch.containsRow(row)) { + continue; + } + + lastFilePatch = this.getFilePatchAt(row); + filePatches.push(lastFilePatch); + } + + return filePatches; + } + + anyPresent() { + return this.buffer !== null && this.filePatches.some(fp => fp.isPresent()); + } + + didAnyChangeExecutableMode() { + for (const filePatch of this.getFilePatches()) { + if (filePatch.didChangeExecutableMode()) { + return true; + } + } + return false; + } + + anyHaveTypechange() { + return this.getFilePatches().some(fp => fp.hasTypechange()); + } + + getMaxLineNumberWidth() { + return this.getFilePatches().reduce((maxWidth, filePatch) => { + const width = filePatch.getMaxLineNumberWidth(); + return maxWidth >= width ? maxWidth : width; + }, 0); + } + + spansMultipleFiles(rows) { + let lastFilePatch = null; + for (const row of rows) { + if (lastFilePatch) { + if (lastFilePatch.containsRow(row)) { + continue; + } + + return true; + } else { + lastFilePatch = this.getFilePatchAt(row); + } + } + return false; + } + + /* + * Construct an apply-able patch String. 
+ */ + toString() { + return this.filePatches.map(fp => fp.toStringIn(this.buffer)).join(''); + } + + isEqual(other) { + return this.toString() === other.toString(); + } +} diff --git a/lib/models/patch/patch.js b/lib/models/patch/patch.js index 2a924f85f20..0fdafa0011e 100644 --- a/lib/models/patch/patch.js +++ b/lib/models/patch/patch.js @@ -1,4 +1,4 @@ -import {TextBuffer} from 'atom'; +import {TextBuffer, Range} from 'atom'; import Hunk from './hunk'; import {Unchanged, Addition, Deletion, NoNewline} from './region'; @@ -8,19 +8,11 @@ export default class Patch { return new NullPatch(); } - constructor({status, hunks, buffer, layers}) { + constructor({status, hunks, marker}) { this.status = status; this.hunks = hunks; - this.buffer = buffer; + this.marker = marker; - this.hunkLayer = layers.hunk; - this.unchangedLayer = layers.unchanged; - this.additionLayer = layers.addition; - this.deletionLayer = layers.deletion; - this.noNewlineLayer = layers.noNewline; - - this.buffer.retain(); - this.hunksByMarker = new Map(this.getHunks().map(hunk => [hunk.getMarker(), hunk])); this.changedLineCount = this.getHunks().reduce((acc, hunk) => acc + hunk.changedLineCount(), 0); } @@ -28,40 +20,33 @@ export default class Patch { return this.status; } - getHunks() { - return this.hunks; + getMarker() { + return this.marker; } - getBuffer() { - return this.buffer; + getRange() { + return this.getMarker().getRange(); } - getHunkLayer() { - return this.hunkLayer; + getStartRange() { + const startPoint = this.getMarker().getRange().start; + return Range.fromObject([startPoint, startPoint]); } - getUnchangedLayer() { - return this.unchangedLayer; - } - - getAdditionLayer() { - return this.additionLayer; - } - - getDeletionLayer() { - return this.deletionLayer; + getHunks() { + return this.hunks; } - getNoNewlineLayer() { - return this.noNewlineLayer; + getChangedLineCount() { + return this.changedLineCount; } - getByteSize() { - return Buffer.byteLength(this.buffer.getText(), 
'utf8'); + containsRow(row) { + return this.marker.getRange().intersectsRow(row); } - getChangedLineCount() { - return this.changedLineCount; + reMarkOn(markable) { + this.marker = markable.markRange(this.getRange(), {invalidate: 'never', exclusive: false}); } getMaxLineNumberWidth() { @@ -69,28 +54,17 @@ export default class Patch { return lastHunk ? lastHunk.getMaxLineNumberWidth() : 0; } - getHunkAt(bufferRow) { - const [marker] = this.hunkLayer.findMarkers({intersectsRow: bufferRow}); - return this.hunksByMarker.get(marker); - } - clone(opts = {}) { return new this.constructor({ status: opts.status !== undefined ? opts.status : this.getStatus(), hunks: opts.hunks !== undefined ? opts.hunks : this.getHunks(), - buffer: opts.buffer !== undefined ? opts.buffer : this.getBuffer(), - layers: opts.layers !== undefined ? opts.layers : { - hunk: this.getHunkLayer(), - unchanged: this.getUnchangedLayer(), - addition: this.getAdditionLayer(), - deletion: this.getDeletionLayer(), - noNewline: this.getNoNewlineLayer(), - }, + marker: opts.marker !== undefined ? opts.marker : this.getMarker(), }); } - getStagePatchForLines(rowSet) { - const builder = new BufferBuilder(this.getBuffer()); + buildStagePatchForLines(originalBuffer, nextLayeredBuffer, rowSet) { + const originalBaseOffset = this.getMarker().getRange().start.row; + const builder = new BufferBuilder(originalBuffer, originalBaseOffset, nextLayeredBuffer); const hunks = []; let newRowDelta = 0; @@ -167,13 +141,18 @@ export default class Patch { } } + const buffer = builder.getBuffer(); + const layers = builder.getLayers(); + const marker = layers.patch.markRange([[0, 0], [buffer.getLastRow() - 1, Infinity]]); + const wholeFile = rowSet.size === this.changedLineCount; const status = this.getStatus() === 'deleted' && !wholeFile ? 
'modified' : this.getStatus(); - return this.clone({hunks, status, buffer: builder.getBuffer(), layers: builder.getLayers()}); + return this.clone({hunks, status, marker}); } - getUnstagePatchForLines(rowSet) { - const builder = new BufferBuilder(this.getBuffer()); + buildUnstagePatchForLines(originalBuffer, nextLayeredBuffer, rowSet) { + const originalBaseOffset = this.getMarker().getRange().start.row; + const builder = new BufferBuilder(originalBuffer, originalBaseOffset, nextLayeredBuffer); const hunks = []; let newRowDelta = 0; @@ -257,180 +236,76 @@ export default class Patch { status = 'added'; } - return this.clone({hunks, status, buffer: builder.getBuffer(), layers: builder.getLayers()}); + const buffer = builder.getBuffer(); + const layers = builder.getLayers(); + const marker = layers.patch.markRange([[0, 0], [buffer.getLastRow(), Infinity]]); + + return this.clone({hunks, status, marker}); } getFirstChangeRange() { const firstHunk = this.getHunks()[0]; if (!firstHunk) { - return [[0, 0], [0, 0]]; + return Range.fromObject([[0, 0], [0, 0]]); } const firstChange = firstHunk.getChanges()[0]; if (!firstChange) { - return [[0, 0], [0, 0]]; + return Range.fromObject([[0, 0], [0, 0]]); } const firstRow = firstChange.getStartBufferRow(); - return [[firstRow, 0], [firstRow, Infinity]]; - } - - getNextSelectionRange(lastPatch, lastSelectedRows) { - if (lastSelectedRows.size === 0) { - return this.getFirstChangeRange(); - } - - const lastMax = Math.max(...lastSelectedRows); - - let lastSelectionIndex = 0; - for (const hunk of lastPatch.getHunks()) { - let includesMax = false; - let hunkSelectionOffset = 0; - - changeLoop: for (const change of hunk.getChanges()) { - for (const {intersection, gap} of change.intersectRows(lastSelectedRows, true)) { - // Only include a partial range if this intersection includes the last selected buffer row. - includesMax = intersection.intersectsRow(lastMax); - const delta = includesMax ? 
lastMax - intersection.start.row + 1 : intersection.getRowCount(); - - if (gap) { - // Range of unselected changes. - hunkSelectionOffset += delta; - } - - if (includesMax) { - break changeLoop; - } - } - } - - lastSelectionIndex += hunkSelectionOffset; - - if (includesMax) { - break; - } - } - - let newSelectionRow = 0; - hunkLoop: for (const hunk of this.getHunks()) { - for (const change of hunk.getChanges()) { - if (lastSelectionIndex < change.bufferRowCount()) { - newSelectionRow = change.getStartBufferRow() + lastSelectionIndex; - break hunkLoop; - } else { - lastSelectionIndex -= change.bufferRowCount(); - } - } - } - - return [[newSelectionRow, 0], [newSelectionRow, Infinity]]; - } - - adoptBufferFrom(lastPatch) { - lastPatch.getHunkLayer().clear(); - lastPatch.getUnchangedLayer().clear(); - lastPatch.getAdditionLayer().clear(); - lastPatch.getDeletionLayer().clear(); - lastPatch.getNoNewlineLayer().clear(); - - const nextBuffer = lastPatch.getBuffer(); - nextBuffer.setText(this.getBuffer().getText()); - - for (const hunk of this.getHunks()) { - hunk.reMarkOn(lastPatch.getHunkLayer()); - for (const region of hunk.getRegions()) { - const target = region.when({ - unchanged: () => lastPatch.getUnchangedLayer(), - addition: () => lastPatch.getAdditionLayer(), - deletion: () => lastPatch.getDeletionLayer(), - nonewline: () => lastPatch.getNoNewlineLayer(), - }); - region.reMarkOn(target); - } - } - - this.hunkLayer = lastPatch.getHunkLayer(); - this.unchangedLayer = lastPatch.getUnchangedLayer(); - this.additionLayer = lastPatch.getAdditionLayer(); - this.deletionLayer = lastPatch.getDeletionLayer(); - this.noNewlineLayer = lastPatch.getNoNewlineLayer(); - - this.buffer = nextBuffer; - this.hunksByMarker = new Map(this.getHunks().map(hunk => [hunk.getMarker(), hunk])); + return Range.fromObject([[firstRow, 0], [firstRow, Infinity]]); } - toString() { - return this.getHunks().reduce((str, hunk) => str + hunk.toStringIn(this.getBuffer()), ''); + toStringIn(buffer) 
{ + return this.getHunks().reduce((str, hunk) => str + hunk.toStringIn(buffer), ''); } isPresent() { return true; } - - isEqual(other) { - if (this === other) { return true; } - - if (!other.isPresent()) { return false; } - if (this.status !== other.status) { return false; } - if (this.changedLineCount !== other.changedLineCount) { return false; } - - if (this.hunks.length !== other.hunks.length) { return false; } - if (this.hunks.some((hunk, i) => !hunk.isEqual(other.hunks[i]))) { return false; } - if (this.buffer.getText() !== other.buffer.getText()) { return false; } - - return true; - } } class NullPatch { constructor() { - this.buffer = new TextBuffer(); - this.hunkLayer = this.buffer.addMarkerLayer(); - this.unchangedLayer = this.buffer.addMarkerLayer(); - this.additionLayer = this.buffer.addMarkerLayer(); - this.deletionLayer = this.buffer.addMarkerLayer(); - this.noNewlineLayer = this.buffer.addMarkerLayer(); - - this.buffer.retain(); + const buffer = new TextBuffer(); + this.marker = buffer.markRange([[0, 0], [0, 0]]); } getStatus() { return null; } - getHunks() { - return []; + getMarker() { + return this.marker; } - getBuffer() { - return this.buffer; + getRange() { + return this.getMarker().getRange(); } - getHunkLayer() { - return this.hunkLayer; + getStartRange() { + return Range.fromObject([[0, 0], [0, 0]]); } - getUnchangedLayer() { - return this.unchangedLayer; - } - - getAdditionLayer() { - return this.additionLayer; + getHunks() { + return []; } - getDeletionLayer() { - return this.deletionLayer; + getChangedLineCount() { + return 0; } - getNoNewlineLayer() { - return this.noNewlineLayer; + containsRow() { + return false; } - getByteSize() { - return 0; + reMarkOn(markable) { + this.marker = markable.markRange(this.getRange(), {invalidate: 'never', exclusive: false}); } - getChangedLineCount() { + getMaxLineNumberWidth() { return 0; } @@ -438,98 +313,61 @@ class NullPatch { if ( opts.status === undefined && opts.hunks === undefined && - 
opts.buffer === undefined && - opts.layers === undefined + opts.marker === undefined ) { return this; } else { return new Patch({ status: opts.status !== undefined ? opts.status : this.getStatus(), hunks: opts.hunks !== undefined ? opts.hunks : this.getHunks(), - buffer: opts.buffer !== undefined ? opts.buffer : this.getBuffer(), - layers: opts.layers !== undefined ? opts.layers : { - hunk: this.getHunkLayer(), - unchanged: this.getUnchangedLayer(), - addition: this.getAdditionLayer(), - deletion: this.getDeletionLayer(), - noNewline: this.getNoNewlineLayer(), - }, + marker: opts.marker !== undefined ? opts.marker : this.getMarker(), }); } } - getStagePatchForLines() { + buildStagePatchForLines() { return this; } - getUnstagePatchForLines() { + buildUnstagePatchForLines() { return this; } getFirstChangeRange() { - return [[0, 0], [0, 0]]; - } - - getNextSelectionRange() { - return [[0, 0], [0, 0]]; - } - - adoptBufferFrom(lastPatch) { - lastPatch.getHunkLayer().clear(); - lastPatch.getUnchangedLayer().clear(); - lastPatch.getAdditionLayer().clear(); - lastPatch.getDeletionLayer().clear(); - lastPatch.getNoNewlineLayer().clear(); - - const nextBuffer = lastPatch.getBuffer(); - nextBuffer.setText(''); - - this.hunkLayer = lastPatch.getHunkLayer(); - this.unchangedLayer = lastPatch.getUnchangedLayer(); - this.additionLayer = lastPatch.getAdditionLayer(); - this.deletionLayer = lastPatch.getDeletionLayer(); - this.noNewlineLayer = lastPatch.getNoNewlineLayer(); - - this.buffer.release(); - this.buffer = nextBuffer; + return Range.fromObject([[0, 0], [0, 0]]); } - getMaxLineNumberWidth() { - return 0; - } - - getHunkAt(bufferRow) { - return undefined; - } - - toString() { + toStringIn() { return ''; } isPresent() { return false; } - - isEqual(other) { - return !other.isPresent(); - } } class BufferBuilder { - constructor(original) { + constructor(original, originalBaseOffset, nextLayeredBuffer) { this.originalBuffer = original; - this.buffer = new TextBuffer(); - 
this.buffer.retain(); - this.layers = new Map( - [Unchanged, Addition, Deletion, NoNewline, 'hunk'].map(key => { - return [key, this.buffer.addMarkerLayer()]; - }), - ); - this.offset = 0; + + this.buffer = nextLayeredBuffer.buffer; + this.layers = new Map([ + [Unchanged, nextLayeredBuffer.layers.unchanged], + [Addition, nextLayeredBuffer.layers.addition], + [Deletion, nextLayeredBuffer.layers.deletion], + [NoNewline, nextLayeredBuffer.layers.noNewline], + ['hunk', nextLayeredBuffer.layers.hunk], + ['patch', nextLayeredBuffer.layers.patch], + ]); + + // The ranges provided to builder methods are expected to be valid within the original buffer. Account for + // the position of the Patch within its original TextBuffer, and any existing content already on the next + // TextBuffer. + this.offset = this.buffer.getLastRow() - originalBaseOffset; this.hunkBufferText = ''; this.hunkRowCount = 0; - this.hunkStartOffset = 0; + this.hunkStartOffset = this.offset; this.hunkRegions = []; this.hunkRange = null; @@ -605,6 +443,7 @@ class BufferBuilder { getLayers() { return { + patch: this.layers.get('patch'), hunk: this.layers.get('hunk'), unchanged: this.layers.get(Unchanged), addition: this.layers.get(Addition), diff --git a/lib/models/patch/region.js b/lib/models/patch/region.js index 7f7f15c1cb7..3afaef844d1 100644 --- a/lib/models/patch/region.js +++ b/lib/models/patch/region.js @@ -25,6 +25,16 @@ class Region { return this.getRange().intersectsRow(row); } + /* + * intersectRows breaks a Region into runs of rows that are included in + * rowSet and rows that are not. 
For example: + * @this Region row 10-20 + * @param rowSet row 11, 12, 13, 17, 19 + * @param includeGaps true (whether the result will include gaps or not) + * @return an array of regions like this: + * (10, gap = true) (11, 12, 13, gap = false) (14, 15, 16, gap = true) + * (17, gap = false) (18, gap = true) (19, gap = false) (20, gap = true) + */ intersectRows(rowSet, includeGaps) { const intersections = []; let withinIntersection = false; @@ -107,22 +117,9 @@ class Region { buffer.lineEndingForRow(this.getRange().end.row); } - invertIn() { - return this; - } - isChange() { return true; } - - isEqual(other) { - if (this === other) { return true; } - - if (this.constructor.origin !== other.constructor.origin) { return false; } - if (!this.getRange().isEqual(other.getRange())) { return false; } - - return true; - } } export class Addition extends Region { diff --git a/lib/models/repository-states/present.js b/lib/models/repository-states/present.js index 77c242c866e..a9f9691a2ab 100644 --- a/lib/models/repository-states/present.js +++ b/lib/models/repository-states/present.js @@ -6,7 +6,7 @@ import State from './state'; import {LargeRepoError} from '../../git-shell-out-strategy'; import {FOCUS} from '../workspace-change-observer'; -import {buildFilePatch} from '../patch'; +import {buildFilePatch, buildMultiFilePatch} from '../patch'; import DiscardHistory from '../discard-history'; import Branch, {nullBranch} from '../branch'; import Author from '../author'; @@ -119,7 +119,7 @@ export default class Present extends State { const includes = (...segments) => fullPath.includes(path.join(...segments)); if (filePathEndsWith(fullPath, '.git', 'index')) { - keys.add(Keys.stagedChangesSinceParentCommit); + keys.add(Keys.stagedChanges); keys.add(Keys.filePatch.all); keys.add(Keys.index.all); keys.add(Keys.statusBundle); @@ -184,6 +184,10 @@ export default class Present extends State { for (let i = 0; i < events.length; i++) { const event = events[i]; + if (!event.path) { + 
continue; + } + if (filePathEndsWith(event.path, '.git', 'MERGE_HEAD')) { if (event.action === 'created') { if (this.isCommitMessageClean()) { @@ -270,21 +274,21 @@ export default class Present extends State { ); } - applyPatchToIndex(filePatch) { + applyPatchToIndex(multiFilePatch) { return this.invalidate( - () => Keys.cacheOperationKeys([filePatch.getOldPath(), filePatch.getNewPath()]), + () => Keys.cacheOperationKeys(Array.from(multiFilePatch.getPathSet())), () => { - const patchStr = filePatch.toString(); + const patchStr = multiFilePatch.toString(); return this.git().applyPatch(patchStr, {index: true}); }, ); } - applyPatchToWorkdir(filePatch) { + applyPatchToWorkdir(multiFilePatch) { return this.invalidate( - () => Keys.workdirOperationKeys([filePatch.getOldPath(), filePatch.getNewPath()]), + () => Keys.workdirOperationKeys(Array.from(multiFilePatch.getPathSet())), () => { - const patchStr = filePatch.toString(); + const patchStr = multiFilePatch.toString(); return this.git().applyPatch(patchStr); }, ); @@ -299,6 +303,7 @@ export default class Present extends State { ...Keys.filePatch.eachWithOpts({staged: true}), Keys.headDescription, Keys.branches, + Keys.stagedChanges, ], // eslint-disable-next-line no-shadow () => this.executePipelineAction('COMMIT', async (message, options = {}) => { @@ -344,7 +349,7 @@ export default class Present extends State { return this.invalidate( () => [ Keys.statusBundle, - Keys.stagedChangesSinceParentCommit, + Keys.stagedChanges, Keys.filePatch.all, Keys.index.all, ], @@ -367,7 +372,7 @@ export default class Present extends State { return this.invalidate( () => [ Keys.statusBundle, - Keys.stagedChangesSinceParentCommit, + Keys.stagedChanges, ...Keys.filePatch.eachWithFileOpts([filePath], [{staged: false}, {staged: true}]), Keys.index.oneWith(filePath), ], @@ -380,7 +385,7 @@ export default class Present extends State { checkout(revision, options = {}) { return this.invalidate( () => [ - Keys.stagedChangesSinceParentCommit, + 
Keys.stagedChanges, Keys.lastCommit, Keys.recentCommits, Keys.authors, @@ -401,7 +406,7 @@ export default class Present extends State { return this.invalidate( () => [ Keys.statusBundle, - Keys.stagedChangesSinceParentCommit, + Keys.stagedChanges, ...paths.map(fileName => Keys.index.oneWith(fileName)), ...Keys.filePatch.eachWithFileOpts(paths, [{staged: true}]), ], @@ -414,7 +419,7 @@ export default class Present extends State { undoLastCommit() { return this.invalidate( () => [ - Keys.stagedChangesSinceParentCommit, + Keys.stagedChanges, Keys.lastCommit, Keys.recentCommits, Keys.authors, @@ -697,6 +702,12 @@ export default class Present extends State { }); } + getStagedChangesPatch() { + return this.cache.getOrSet(Keys.stagedChanges, () => { + return this.git().getStagedChangesPatch().then(buildMultiFilePatch); + }); + } + readFileFromIndex(filePath) { return this.cache.getOrSet(Keys.index.oneWith(filePath), () => { return this.git().readFileFromIndex(filePath); @@ -1021,7 +1032,7 @@ class GroupKey { const Keys = { statusBundle: new CacheKey('status-bundle'), - stagedChangesSinceParentCommit: new CacheKey('staged-changes-since-parent-commit'), + stagedChanges: new CacheKey('staged-changes'), filePatch: { _optKey: ({staged}) => { @@ -1100,12 +1111,12 @@ const Keys = { ...Keys.workdirOperationKeys(fileNames), ...Keys.filePatch.eachWithFileOpts(fileNames, [{staged: true}]), ...fileNames.map(Keys.index.oneWith), - Keys.stagedChangesSinceParentCommit, + Keys.stagedChanges, ], headOperationKeys: () => [ ...Keys.filePatch.eachWithOpts({staged: true}), - Keys.stagedChangesSinceParentCommit, + Keys.stagedChanges, Keys.lastCommit, Keys.recentCommits, Keys.authors, diff --git a/lib/models/repository-states/state.js b/lib/models/repository-states/state.js index 7e3e88988f7..3b1775ec3e2 100644 --- a/lib/models/repository-states/state.js +++ b/lib/models/repository-states/state.js @@ -2,7 +2,7 @@ import {nullCommit} from '../commit'; import BranchSet from '../branch-set'; 
import RemoteSet from '../remote-set'; import {nullOperationStates} from '../operation-states'; -import FilePatch from '../patch/file-patch'; +import MultiFilePatch from '../patch/multi-file-patch'; /** * Map of registered subclasses to allow states to transition to one another without circular dependencies. @@ -275,7 +275,11 @@ export default class State { } getFilePatchForPath(filePath, options = {}) { - return Promise.resolve(FilePatch.createNull()); + return Promise.resolve(new MultiFilePatch({})); + } + + getStagedChangesPatch() { + return Promise.resolve(new MultiFilePatch({})); } readFileFromIndex(filePath) { diff --git a/lib/models/repository.js b/lib/models/repository.js index 8dc10b022a3..06b388d654a 100644 --- a/lib/models/repository.js +++ b/lib/models/repository.js @@ -326,6 +326,7 @@ const delegates = [ 'getStatusBundle', 'getStatusesForChangedFiles', 'getFilePatchForPath', + 'getStagedChangesPatch', 'readFileFromIndex', 'getLastCommit', diff --git a/lib/prop-types.js b/lib/prop-types.js index 13d0b4d394c..f00c709f8f3 100644 --- a/lib/prop-types.js +++ b/lib/prop-types.js @@ -130,6 +130,10 @@ export const FilePatchItemPropType = PropTypes.shape({ status: PropTypes.string.isRequired, }); +export const MultiFilePatchPropType = PropTypes.shape({ + getFilePatches: PropTypes.func.isRequired, +}); + const statusNames = [ 'added', 'deleted', diff --git a/lib/views/commit-view.js b/lib/views/commit-view.js index 5b0560c5890..6013162cfaf 100644 --- a/lib/views/commit-view.js +++ b/lib/views/commit-view.js @@ -23,6 +23,7 @@ let FakeKeyDownEvent; export default class CommitView extends React.Component { static focus = { + COMMIT_PREVIEW_BUTTON: Symbol('commit-preview-button'), EDITOR: Symbol('commit-editor'), COAUTHOR_INPUT: Symbol('coauthor-input'), ABORT_MERGE_BUTTON: Symbol('commit-abort-merge-button'), @@ -41,6 +42,7 @@ export default class CommitView extends React.Component { mergeConflictsExist: PropTypes.bool.isRequired, stagedChangesExist: 
PropTypes.bool.isRequired, isCommitting: PropTypes.bool.isRequired, + commitPreviewActive: PropTypes.bool.isRequired, deactivateCommitBox: PropTypes.bool.isRequired, maximumCharacterLimit: PropTypes.number.isRequired, messageBuffer: PropTypes.object.isRequired, // FIXME more specific proptype @@ -51,6 +53,8 @@ export default class CommitView extends React.Component { abortMerge: PropTypes.func.isRequired, prepareToCommit: PropTypes.func.isRequired, toggleExpandedCommitMessageEditor: PropTypes.func.isRequired, + toggleCommitPreview: PropTypes.func.isRequired, + activateCommitPreview: PropTypes.func.isRequired, }; constructor(props, context) { @@ -73,6 +77,7 @@ export default class CommitView extends React.Component { this.subscriptions = new CompositeDisposable(); this.refRoot = new RefHolder(); + this.refCommitPreviewButton = new RefHolder(); this.refExpandButton = new RefHolder(); this.refCommitButton = new RefHolder(); this.refHardWrapButton = new RefHolder(); @@ -157,6 +162,18 @@ export default class CommitView extends React.Component { + + + +
+ +
{this.commitButtonText()} {this.commitIsEnabled(false) && @@ -556,26 +573,38 @@ export default class CommitView extends React.Component { return this.refEditorComponent.map(editor => editor.contains(document.activeElement)).getOr(false); } - rememberFocus(event) { - if (this.refEditorComponent.map(editor => editor.contains(event.target)).getOr(false)) { + hasFocusAtBeginning() { + return this.refCommitPreviewButton.map(button => button.contains(document.activeElement)).getOr(false); + } + + getFocus(element = document.activeElement) { + if (this.refCommitPreviewButton.map(button => button.contains(element)).getOr(false)) { + return CommitView.focus.COMMIT_PREVIEW_BUTTON; + } + + if (this.refEditorComponent.map(editor => editor.contains(element)).getOr(false)) { return CommitView.focus.EDITOR; } - if (this.refAbortMergeButton.map(e => e.contains(event.target)).getOr(false)) { + if (this.refAbortMergeButton.map(e => e.contains(element)).getOr(false)) { return CommitView.focus.ABORT_MERGE_BUTTON; } - if (this.refCommitButton.map(e => e.contains(event.target)).getOr(false)) { + if (this.refCommitButton.map(e => e.contains(element)).getOr(false)) { return CommitView.focus.COMMIT_BUTTON; } - if (this.refCoAuthorSelect.map(c => c.wrapper && c.wrapper.contains(event.target)).getOr(false)) { + if (this.refCoAuthorSelect.map(c => c.wrapper && c.wrapper.contains(element)).getOr(false)) { return CommitView.focus.COAUTHOR_INPUT; } return null; } + rememberFocus(event) { + return this.getFocus(event.target); + } + setFocus(focus) { let fallback = false; const focusElement = element => { @@ -583,6 +612,12 @@ export default class CommitView extends React.Component { return true; }; + if (focus === CommitView.focus.COMMIT_PREVIEW_BUTTON) { + if (this.refCommitPreviewButton.map(focusElement).getOr(false)) { + return true; + } + } + if (focus === CommitView.focus.EDITOR) { if (this.refEditorComponent.map(focusElement).getOr(false)) { if (this.props.messageBuffer.getText().length > 0 
&& !this.isValidMessage()) { @@ -621,4 +656,78 @@ export default class CommitView extends React.Component { return false; } + + advanceFocus(event) { + const f = this.constructor.focus; + const current = this.getFocus(); + if (current === f.EDITOR) { + // Let the editor handle it + return true; + } + + let next = null; + switch (current) { + case f.COMMIT_PREVIEW_BUTTON: + next = f.EDITOR; + break; + case f.COAUTHOR_INPUT: + next = this.props.isMerging ? f.ABORT_MERGE_BUTTON : f.COMMIT_BUTTON; + break; + case f.ABORT_MERGE_BUTTON: + next = f.COMMIT_BUTTON; + break; + case f.COMMIT_BUTTON: + // End of tab navigation. Prevent cycling. + event.stopPropagation(); + return true; + } + + if (next !== null) { + this.setFocus(next); + event.stopPropagation(); + + return true; + } else { + return false; + } + } + + retreatFocus(event) { + const f = this.constructor.focus; + const current = this.getFocus(); + + let next = null; + switch (current) { + case f.COMMIT_BUTTON: + if (this.props.isMerging) { + next = f.ABORT_MERGE_BUTTON; + } else if (this.state.showCoAuthorInput) { + next = f.COAUTHOR_INPUT; + } else { + next = f.EDITOR; + } + break; + case f.ABORT_MERGE_BUTTON: + next = this.state.showCoAuthorInput ? f.COAUTHOR_INPUT : f.EDITOR; + break; + case f.COAUTHOR_INPUT: + next = f.EDITOR; + break; + case f.EDITOR: + next = f.COMMIT_PREVIEW_BUTTON; + break; + case f.COMMIT_PREVIEW_BUTTON: + // Allow the GitTabView to retreat focus back to the last StagingView list. 
+ return false; + } + + if (next !== null) { + this.setFocus(next); + event.stopPropagation(); + + return true; + } else { + return false; + } + } } diff --git a/lib/views/file-patch-header-view.js b/lib/views/file-patch-header-view.js index 3514460e6b7..8f7db7d8e2b 100644 --- a/lib/views/file-patch-header-view.js +++ b/lib/views/file-patch-header-view.js @@ -1,17 +1,21 @@ +import path from 'path'; + import React, {Fragment} from 'react'; import PropTypes from 'prop-types'; import cx from 'classnames'; import RefHolder from '../models/ref-holder'; -import Tooltip from '../atom/tooltip'; +import ChangedFileItem from '../items/changed-file-item'; +import CommitPreviewItem from '../items/commit-preview-item'; export default class FilePatchHeaderView extends React.Component { static propTypes = { relPath: PropTypes.string.isRequired, stagingStatus: PropTypes.oneOf(['staged', 'unstaged']).isRequired, - isPartiallyStaged: PropTypes.bool.isRequired, + isPartiallyStaged: PropTypes.bool, hasHunks: PropTypes.bool.isRequired, - hasUndoHistory: PropTypes.bool.isRequired, + hasUndoHistory: PropTypes.bool, + hasMultipleFileSelections: PropTypes.bool.isRequired, tooltips: PropTypes.object.isRequired, @@ -19,6 +23,8 @@ export default class FilePatchHeaderView extends React.Component { diveIntoMirrorPatch: PropTypes.func.isRequired, openFile: PropTypes.func.isRequired, toggleFile: PropTypes.func.isRequired, + + itemType: PropTypes.oneOf([ChangedFileItem, CommitPreviewItem]).isRequired, }; constructor(props) { @@ -40,8 +46,29 @@ export default class FilePatchHeaderView extends React.Component { } renderTitle() { - const status = this.props.stagingStatus; - return `${status[0].toUpperCase()}${status.slice(1)} Changes for ${this.props.relPath}`; + if (this.props.itemType === ChangedFileItem) { + const status = this.props.stagingStatus; + return ( + {status[0].toUpperCase()}{status.slice(1)} Changes for {this.renderPath()} + ); + } else { + return this.renderPath(); + } + } + + 
renderPath() { + const dirname = path.dirname(this.props.relPath); + const basename = path.basename(this.props.relPath); + + if (dirname === '.') { + return {basename}; + } else { + return ( + + {dirname}{path.sep}{basename} + + ); + } } renderButtonGroup() { @@ -56,15 +83,16 @@ export default class FilePatchHeaderView extends React.Component { } renderUndoDiscardButton() { - if (!this.props.hasUndoHistory || this.props.stagingStatus !== 'unstaged') { + const unstagedChangedFileItem = this.props.itemType === ChangedFileItem && this.props.stagingStatus === 'unstaged'; + if (unstagedChangedFileItem && this.props.hasUndoHistory) { + return ( + + ); + } else { return null; } - - return ( - - ); } renderMirrorPatchButton() { @@ -75,11 +103,11 @@ export default class FilePatchHeaderView extends React.Component { const attrs = this.props.stagingStatus === 'unstaged' ? { iconClass: 'icon-tasklist', - tooltipText: 'View staged changes', + buttonText: 'View Staged', } : { iconClass: 'icon-list-unordered', - tooltipText: 'View unstaged changes', + buttonText: 'View Unstaged', }; return ( @@ -87,30 +115,27 @@ export default class FilePatchHeaderView extends React.Component { ); } renderOpenFileButton() { + let buttonText = 'Jump To File'; + if (this.props.hasMultipleFileSelections) { + buttonText += 's'; + } + return ( ); } diff --git a/lib/views/git-tab-view.js b/lib/views/git-tab-view.js index d748e3bf71b..9899daca097 100644 --- a/lib/views/git-tab-view.js +++ b/lib/views/git-tab-view.js @@ -248,30 +248,38 @@ export default class GitTabView extends React.Component { } async advanceFocus(evt) { - // The commit controller manages its own focus - if (this.refCommitController.map(c => c.hasFocus()).getOr(false)) { + // Advance focus within the CommitView if it's there + if (this.refCommitController.map(c => c.advanceFocus(evt)).getOr(false)) { return; } + // Advance focus to the next staging view list, if it's there if (await this.props.refStagingView.map(view => 
view.activateNextList()).getOr(false)) { evt.stopPropagation(); - } else { - if (this.refCommitController.map(c => c.setFocus(GitTabView.focus.EDITOR)).getOr(false)) { - evt.stopPropagation(); - } + return; + } + + // Advance focus from the staging view lists to the CommitView + if (this.refCommitController.map(c => c.setFocus(GitTabView.focus.COMMIT_PREVIEW_BUTTON)).getOr(false)) { + evt.stopPropagation(); } } async retreatFocus(evt) { - if (this.refCommitController.map(c => c.hasFocus()).getOr(false)) { - // if the commit editor is focused, focus the last staging view list - if (this.refCommitController.map(c => c.hasFocusEditor()).getOr(false) && - await this.props.refStagingView.map(view => view.activateLastList()).getOr(null) - ) { + // Retreat focus within the CommitView if it's there + if (this.refCommitController.map(c => c.retreatFocus(evt)).getOr(false)) { + return; + } + + if (this.refCommitController.map(c => c.hasFocusAtBeginning()).getOr(false)) { + // Retreat focus from the beginning of the CommitView to the end of the StagingView + if (await this.props.refStagingView.map(view => view.activateLastList()).getOr(null)) { this.setFocus(GitTabView.focus.STAGING); evt.stopPropagation(); } } else if (await this.props.refStagingView.map(c => c.activatePreviousList()).getOr(null)) { + // Retreat focus within the StagingView + this.setFocus(GitTabView.focus.STAGING); evt.stopPropagation(); } } @@ -281,6 +289,10 @@ export default class GitTabView extends React.Component { this.setFocus(GitTabView.focus.STAGING); } + focusAndSelectCommitPreviewButton() { + this.setFocus(GitTabView.focus.COMMIT_PREVIEW_BUTTON); + } + quietlySelectItem(filePath, stagingStatus) { return this.props.refStagingView.map(view => view.quietlySelectItem(filePath, stagingStatus)).getOr(false); } diff --git a/lib/views/github-tab-view.js b/lib/views/github-tab-view.js index cac4e26b2ca..4c8bc73a4be 100644 --- a/lib/views/github-tab-view.js +++ b/lib/views/github-tab-view.js @@ -22,7 +22,7 
@@ export default class GitHubTabView extends React.Component { remotes: RemoteSetPropType.isRequired, currentRemote: RemotePropType.isRequired, manyRemotesAvailable: PropTypes.bool.isRequired, - aheadCount: PropTypes.number.isRequired, + aheadCount: PropTypes.number, pushInProgress: PropTypes.bool.isRequired, isLoading: PropTypes.bool.isRequired, diff --git a/lib/views/file-patch-view.js b/lib/views/multi-file-patch-view.js similarity index 75% rename from lib/views/file-patch-view.js rename to lib/views/multi-file-patch-view.js index 8736496e217..687412879a3 100644 --- a/lib/views/file-patch-view.js +++ b/lib/views/multi-file-patch-view.js @@ -5,6 +5,7 @@ import {Range} from 'atom'; import {CompositeDisposable} from 'event-kit'; import {autobind} from '../helpers'; +import {RefHolderPropType, MultiFilePatchPropType} from '../prop-types'; import AtomTextEditor from '../atom/atom-text-editor'; import Marker from '../atom/marker'; import MarkerLayer from '../atom/marker-layer'; @@ -15,26 +16,29 @@ import FilePatchHeaderView from './file-patch-header-view'; import FilePatchMetaView from './file-patch-meta-view'; import HunkHeaderView from './hunk-header-view'; import RefHolder from '../models/ref-holder'; +import ChangedFileItem from '../items/changed-file-item'; +import CommitPreviewItem from '../items/commit-preview-item'; +import File from '../models/patch/file'; const executableText = { - 100644: 'non executable', - 100755: 'executable', + [File.modes.NORMAL]: 'non executable', + [File.modes.EXECUTABLE]: 'executable', }; const NBSP_CHARACTER = '\u00a0'; const BLANK_LABEL = () => NBSP_CHARACTER; -export default class FilePatchView extends React.Component { +export default class MultiFilePatchView extends React.Component { static propTypes = { - relPath: PropTypes.string.isRequired, stagingStatus: PropTypes.oneOf(['staged', 'unstaged']).isRequired, - isPartiallyStaged: PropTypes.bool.isRequired, - filePatch: PropTypes.object.isRequired, + isPartiallyStaged: 
PropTypes.bool, + multiFilePatch: MultiFilePatchPropType.isRequired, selectionMode: PropTypes.oneOf(['hunk', 'line']).isRequired, selectedRows: PropTypes.object.isRequired, + hasMultipleFileSelections: PropTypes.bool.isRequired, repository: PropTypes.object.isRequired, - hasUndoHistory: PropTypes.bool.isRequired, + hasUndoHistory: PropTypes.bool, workspace: PropTypes.object.isRequired, commands: PropTypes.object.isRequired, @@ -45,7 +49,7 @@ export default class FilePatchView extends React.Component { selectedRowsChanged: PropTypes.func.isRequired, diveIntoMirrorPatch: PropTypes.func.isRequired, - surfaceFile: PropTypes.func.isRequired, + surface: PropTypes.func.isRequired, openFile: PropTypes.func.isRequired, toggleFile: PropTypes.func.isRequired, toggleRows: PropTypes.func.isRequired, @@ -53,6 +57,9 @@ export default class FilePatchView extends React.Component { toggleSymlinkChange: PropTypes.func.isRequired, undoLastDiscard: PropTypes.func.isRequired, discardRows: PropTypes.func.isRequired, + + refInitialFocus: RefHolderPropType, + itemType: PropTypes.oneOf([ChangedFileItem, CommitPreviewItem]).isRequired, } constructor(props) { @@ -79,17 +86,24 @@ export default class FilePatchView extends React.Component { this.refEditor.observe(editor => { this.refEditorElement.setter(editor.getElement()); }), + this.refEditorElement.observe(element => { + this.props.refInitialFocus && this.props.refInitialFocus.setter(element); + }), ); } componentDidMount() { window.addEventListener('mouseup', this.didMouseUp); this.refEditor.map(editor => { - const [firstHunk] = this.props.filePatch.getHunks(); - if (firstHunk) { - this.nextSelectionMode = 'hunk'; - editor.setSelectedBufferRange(firstHunk.getRange()); + // this.props.multiFilePatch is guaranteed to contain at least one FilePatch if it is rendered.
+ const [firstPatch] = this.props.multiFilePatch.getFilePatches(); + const [firstHunk] = firstPatch.getHunks(); + if (!firstHunk) { + return null; } + + this.nextSelectionMode = 'hunk'; + editor.setSelectedBufferRange(firstHunk.getRange()); return null; }); @@ -100,22 +114,27 @@ export default class FilePatchView extends React.Component { getSnapshotBeforeUpdate(prevProps) { let newSelectionRange = null; - if (this.props.filePatch !== prevProps.filePatch) { + if (this.props.multiFilePatch !== prevProps.multiFilePatch) { // Heuristically adjust the editor selection based on the old file patch, the old row selection state, and // the incoming patch. - newSelectionRange = this.props.filePatch.getNextSelectionRange( - prevProps.filePatch, + newSelectionRange = this.props.multiFilePatch.getNextSelectionRange( + prevProps.multiFilePatch, prevProps.selectedRows, ); this.suppressChanges = true; - this.props.filePatch.adoptBufferFrom(prevProps.filePatch); + this.props.multiFilePatch.adoptBufferFrom(prevProps.multiFilePatch); this.suppressChanges = false; } return newSelectionRange; } componentDidUpdate(prevProps, prevState, newSelectionRange) { + if (prevProps.refInitialFocus !== this.props.refInitialFocus) { + prevProps.refInitialFocus && prevProps.refInitialFocus.setter(null); + this.props.refInitialFocus && this.refEditorElement.map(this.props.refInitialFocus.setter); + } + if (newSelectionRange) { this.refEditor.map(editor => { if (this.props.selectionMode === 'line') { @@ -124,7 +143,7 @@ export default class FilePatchView extends React.Component { } else { const nextHunks = new Set( Range.fromObject(newSelectionRange).getRows() - .map(row => this.props.filePatch.getHunkAt(row)) + .map(row => this.props.multiFilePatch.getHunkAt(row)) .filter(Boolean), ); const nextRanges = nextHunks.size > 0 @@ -151,53 +170,37 @@ export default class FilePatchView extends React.Component { const rootClass = cx( 'github-FilePatchView', 
`github-FilePatchView--${this.props.stagingStatus}`, - {'github-FilePatchView--blank': !this.props.filePatch.isPresent()}, + {'github-FilePatchView--blank': !this.props.multiFilePatch.anyPresent()}, {'github-FilePatchView--hunkMode': this.props.selectionMode === 'hunk'}, ); return (
- {this.renderCommands()} - 0} - hasUndoHistory={this.props.hasUndoHistory} - - tooltips={this.props.tooltips} - - undoLastDiscard={this.undoLastDiscardFromButton} - diveIntoMirrorPatch={this.props.diveIntoMirrorPatch} - openFile={this.didOpenFile} - toggleFile={this.props.toggleFile} - /> -
- {this.props.filePatch.isPresent() ? this.renderNonEmptyPatch() : this.renderEmptyPatch()} + {this.props.multiFilePatch.anyPresent() ? this.renderNonEmptyPatch() : this.renderEmptyPatch()}
-
); } renderCommands() { let stageModeCommand = null; - if (this.props.filePatch.didChangeExecutableMode()) { + let stageSymlinkCommand = null; + + if (this.props.multiFilePatch.didAnyChangeExecutableMode()) { const command = this.props.stagingStatus === 'unstaged' ? 'github:stage-file-mode-change' : 'github:unstage-file-mode-change'; - stageModeCommand = ; + stageModeCommand = ; } - let stageSymlinkCommand = null; - if (this.props.filePatch.hasSymlink()) { + if (this.props.multiFilePatch.anyHaveTypechange()) { const command = this.props.stagingStatus === 'unstaged' ? 'github:stage-symlink-change' : 'github:unstage-symlink-change'; - stageSymlinkCommand = ; + stageSymlinkCommand = ; } return ( @@ -207,8 +210,8 @@ export default class FilePatchView extends React.Component { - - + + {stageModeCommand} {stageSymlinkCommand} @@ -225,7 +228,7 @@ export default class FilePatchView extends React.Component { )} - - - - {this.renderExecutableModeChangeMeta()} - {this.renderSymlinkChangeMeta()} - - - - - {this.renderHunkHeaders()} + {this.props.multiFilePatch.getFilePatches().map(this.renderFilePatchDecorations)} {this.renderLineDecorations( Array.from(this.props.selectedRows, row => Range.fromObject([[row, 0], [row, Infinity]])), @@ -285,17 +279,17 @@ export default class FilePatchView extends React.Component { )} {this.renderDecorationsOnLayer( - this.props.filePatch.getAdditionLayer(), + this.props.multiFilePatch.getAdditionLayer(), 'github-FilePatchView-line--added', {icon: true, line: true}, )} {this.renderDecorationsOnLayer( - this.props.filePatch.getDeletionLayer(), + this.props.multiFilePatch.getDeletionLayer(), 'github-FilePatchView-line--deleted', {icon: true, line: true}, )} {this.renderDecorationsOnLayer( - this.props.filePatch.getNoNewlineLayer(), + this.props.multiFilePatch.getNoNewlineLayer(), 'github-FilePatchView-line--nonewline', {icon: true, line: true}, )} @@ -304,13 +298,44 @@ export default class FilePatchView extends React.Component { ); } - 
renderExecutableModeChangeMeta() { - if (!this.props.filePatch.didChangeExecutableMode()) { + renderFilePatchDecorations = filePatch => { + return ( + + + + 0} + hasUndoHistory={this.props.hasUndoHistory} + hasMultipleFileSelections={this.props.hasMultipleFileSelections} + + tooltips={this.props.tooltips} + + undoLastDiscard={() => this.undoLastDiscardFromButton(filePatch)} + diveIntoMirrorPatch={() => this.props.diveIntoMirrorPatch(filePatch)} + openFile={() => this.didOpenFile({selectedFilePatch: filePatch})} + toggleFile={() => this.props.toggleFile(filePatch)} + /> + {this.renderSymlinkChangeMeta(filePatch)} + {this.renderExecutableModeChangeMeta(filePatch)} + + + + {this.renderHunkHeaders(filePatch)} + + ); + } + + renderExecutableModeChangeMeta(filePatch) { + if (!filePatch.didChangeExecutableMode()) { return null; } - const oldMode = this.props.filePatch.getOldMode(); - const newMode = this.props.filePatch.getNewMode(); + const oldMode = filePatch.getOldMode(); + const newMode = filePatch.getNewMode(); const attrs = this.props.stagingStatus === 'unstaged' ? { @@ -327,7 +352,7 @@ export default class FilePatchView extends React.Component { title="Mode change" actionIcon={attrs.actionIcon} actionText={attrs.actionText} - action={this.props.toggleModeChange}> + action={() => this.props.toggleModeChange(filePatch)}> File changed mode @@ -341,15 +366,15 @@ export default class FilePatchView extends React.Component { ); } - renderSymlinkChangeMeta() { - if (!this.props.filePatch.hasSymlink()) { + renderSymlinkChangeMeta(filePatch) { + if (!filePatch.hasSymlink()) { return null; } let detail =
; let title = ''; - const oldSymlink = this.props.filePatch.getOldSymlink(); - const newSymlink = this.props.filePatch.getNewSymlink(); + const oldSymlink = filePatch.getOldSymlink(); + const newSymlink = filePatch.getNewSymlink(); if (oldSymlink && newSymlink) { detail = ( @@ -410,7 +435,7 @@ export default class FilePatchView extends React.Component { title={title} actionIcon={attrs.actionIcon} actionText={attrs.actionText} - action={this.props.toggleSymlinkChange}> + action={() => this.props.toggleSymlinkChange(filePatch)}> {detail} @@ -418,18 +443,18 @@ export default class FilePatchView extends React.Component { ); } - renderHunkHeaders() { + renderHunkHeaders(filePatch) { const toggleVerb = this.props.stagingStatus === 'unstaged' ? 'Stage' : 'Unstage'; const selectedHunks = new Set( - Array.from(this.props.selectedRows, row => this.props.filePatch.getHunkAt(row)), + Array.from(this.props.selectedRows, row => this.props.multiFilePatch.getHunkAt(row)), ); return ( - {this.props.filePatch.getHunks().map((hunk, index) => { + {filePatch.getHunks().map((hunk, index) => { const containsSelection = this.props.selectionMode === 'line' && selectedHunks.has(hunk); - const isSelected = this.props.selectionMode === 'hunk' && selectedHunks.has(hunk); + const isSelected = (this.props.selectionMode === 'hunk') && selectedHunks.has(hunk); let buttonSuffix = ''; if (containsSelection) { @@ -452,7 +477,7 @@ export default class FilePatchView extends React.Component { return ( - + { - this.props.undoLastDiscard({eventSource: 'button'}); + undoLastDiscardFromButton = filePatch => { + this.props.undoLastDiscard(filePatch, {eventSource: 'button'}); } discardSelectionFromCommand = () => { @@ -570,7 +595,11 @@ export default class FilePatchView extends React.Component { toggleHunkSelection(hunk, containsSelection) { if (containsSelection) { - return this.props.toggleRows(this.props.selectedRows, this.props.selectionMode, {eventSource: 'button'}); + return this.props.toggleRows( + 
this.props.selectedRows, + this.props.selectionMode, + {eventSource: 'button'}, + ); } else { const changeRows = new Set( hunk.getChanges() @@ -579,13 +608,21 @@ export default class FilePatchView extends React.Component { return rows; }, []), ); - return this.props.toggleRows(changeRows, 'hunk', {eventSource: 'button'}); + return this.props.toggleRows( + changeRows, + 'hunk', + {eventSource: 'button'}, + ); } } discardHunkSelection(hunk, containsSelection) { if (containsSelection) { - return this.props.discardRows(this.props.selectedRows, this.props.selectionMode, {eventSource: 'button'}); + return this.props.discardRows( + this.props.selectedRows, + this.props.selectionMode, + {eventSource: 'button'}, + ); } else { const changeRows = new Set( hunk.getChanges() @@ -768,6 +805,22 @@ export default class FilePatchView extends React.Component { }); } + didToggleModeChange = () => { + return Promise.all( + Array.from(this.getSelectedFilePatches()) + .filter(fp => fp.didChangeExecutableMode()) + .map(this.props.toggleModeChange), + ); + } + + didToggleSymlinkChange = () => { + return Promise.all( + Array.from(this.getSelectedFilePatches()) + .filter(fp => fp.hasTypechange()) + .map(this.props.toggleSymlinkChange), + ); + } + selectNextHunk() { this.refEditor.map(editor => { const nextHunks = new Set( @@ -792,15 +845,16 @@ export default class FilePatchView extends React.Component { }); } - didOpenFile() { - const cursors = []; + didOpenFile({selectedFilePatch} = {}) { + const cursorsByFilePatch = new Map(); this.refEditor.map(editor => { const placedRows = new Set(); for (const cursor of editor.getCursors()) { const cursorRow = cursor.getBufferPosition().row; - const hunk = this.props.filePatch.getHunkAt(cursorRow); + const hunk = this.props.multiFilePatch.getHunkAt(cursorRow); + const filePatch = this.props.multiFilePatch.getFilePatchAt(cursorRow); /* istanbul ignore next */ if (!hunk) { continue; @@ -809,7 +863,7 @@ export default class FilePatchView extends 
React.Component { let newRow = hunk.getNewRowAt(cursorRow); let newColumn = cursor.getBufferPosition().column; if (newRow === null) { - let nearestRow = hunk.getNewStartRow() - 1; + let nearestRow = hunk.getNewStartRow(); for (const region of hunk.getRegions()) { if (!region.includesBufferRow(cursorRow)) { region.when({ @@ -833,14 +887,34 @@ export default class FilePatchView extends React.Component { } if (newRow !== null) { - cursors.push([newRow, newColumn]); + // Why is this needed? I _think_ everything is in terms of buffer position + // so there shouldn't be an off-by-one issue + newRow -= 1; + const cursors = cursorsByFilePatch.get(filePatch); + if (!cursors) { + cursorsByFilePatch.set(filePatch, [[newRow, newColumn]]); + } else { + cursors.push([newRow, newColumn]); + } } } return null; }); - this.props.openFile(cursors); + const filePatchesWithCursors = new Set(cursorsByFilePatch.keys()); + if (selectedFilePatch && !filePatchesWithCursors.has(selectedFilePatch)) { + const [firstHunk] = selectedFilePatch.getHunks(); + const cursorRow = firstHunk ? 
firstHunk.getNewStartRow() - 1 : 0; + return this.props.openFile(selectedFilePatch, [[cursorRow, 0]], true); + } else { + const pending = cursorsByFilePatch.size === 1; + return Promise.all(Array.from(cursorsByFilePatch, value => { + const [filePatch, cursors] = value; + return this.props.openFile(filePatch, cursors, pending); + })); + } + } getSelectedRows() { @@ -882,11 +956,21 @@ export default class FilePatchView extends React.Component { if (this.suppressChanges) { return; } - this.props.selectedRowsChanged(this.getSelectedRows(), this.nextSelectionMode || 'line'); + + const nextCursorRows = this.refEditor.map(editor => { + return editor.getCursorBufferPositions().map(position => position.row); + }).getOr([]); + const hasMultipleFileSelections = this.props.multiFilePatch.spansMultipleFiles(nextCursorRows); + + this.props.selectedRowsChanged( + this.getSelectedRows(), + this.nextSelectionMode || 'line', + hasMultipleFileSelections, + ); } oldLineNumberLabel({bufferRow, softWrapped}) { - const hunk = this.props.filePatch.getHunkAt(bufferRow); + const hunk = this.props.multiFilePatch.getHunkAt(bufferRow); if (hunk === undefined) { return this.pad(''); } @@ -900,7 +984,7 @@ export default class FilePatchView extends React.Component { } newLineNumberLabel({bufferRow, softWrapped}) { - const hunk = this.props.filePatch.getHunkAt(bufferRow); + const hunk = this.props.multiFilePatch.getHunkAt(bufferRow); if (hunk === undefined) { return this.pad(''); } @@ -925,7 +1009,7 @@ export default class FilePatchView extends React.Component { const seen = new Set(); return editor.getSelectedBufferRanges().reduce((acc, range) => { for (const row of range.getRows()) { - const hunk = this.props.filePatch.getHunkAt(row); + const hunk = this.props.multiFilePatch.getHunkAt(row); if (!hunk || seen.has(hunk)) { continue; } @@ -938,18 +1022,35 @@ export default class FilePatchView extends React.Component { }).getOr([]); } + /* + * Return a Set of FilePatches that include at least one 
editor selection. The selection need not contain an actual + * change row. + */ + getSelectedFilePatches() { + return this.refEditor.map(editor => { + const patches = new Set(); + for (const range of editor.getSelectedBufferRanges()) { + for (const row of range.getRows()) { + const patch = this.props.multiFilePatch.getFilePatchAt(row); + patches.add(patch); + } + } + return patches; + }).getOr(new Set()); + } + getHunkBefore(hunk) { const prevRow = hunk.getRange().start.row - 1; - return this.props.filePatch.getHunkAt(prevRow); + return this.props.multiFilePatch.getHunkAt(prevRow); } getHunkAfter(hunk) { const nextRow = hunk.getRange().end.row + 1; - return this.props.filePatch.getHunkAt(nextRow); + return this.props.multiFilePatch.getHunkAt(nextRow); } isChangeRow(bufferRow) { - const changeLayers = [this.props.filePatch.getAdditionLayer(), this.props.filePatch.getDeletionLayer()]; + const changeLayers = [this.props.multiFilePatch.getAdditionLayer(), this.props.multiFilePatch.getDeletionLayer()]; return changeLayers.some(layer => layer.findMarkers({intersectsRow: bufferRow}).length > 0); } @@ -963,7 +1064,7 @@ export default class FilePatchView extends React.Component { } pad(num) { - const maxDigits = this.props.filePatch.getMaxLineNumberWidth(); + const maxDigits = this.props.multiFilePatch.getMaxLineNumberWidth(); if (num === null) { return NBSP_CHARACTER.repeat(maxDigits); } else { diff --git a/lib/views/staging-view.js b/lib/views/staging-view.js index fae58d5cf04..8629ff89534 100644 --- a/lib/views/staging-view.js +++ b/lib/views/staging-view.js @@ -13,7 +13,7 @@ import MergeConflictListItemView from './merge-conflict-list-item-view'; import CompositeListSelection from '../models/composite-list-selection'; import ResolutionProgress from '../models/conflicts/resolution-progress'; import RefHolder from '../models/ref-holder'; -import FilePatchItem from '../items/file-patch-item'; +import ChangedFileItem from '../items/changed-file-item'; import Commands, 
{Command} from '../atom/commands'; import {autobind} from '../helpers'; import {addEvent} from '../reporter-proxy'; @@ -277,7 +277,7 @@ export default class StagingView extends React.Component { - + @@ -743,7 +743,7 @@ export default class StagingView extends React.Component { const activePane = this.props.workspace.getCenter().getActivePane(); const activePendingItem = activePane.getPendingItem(); const activePaneHasPendingFilePatchItem = activePendingItem && activePendingItem.getRealItem && - activePendingItem.getRealItem() instanceof FilePatchItem; + activePendingItem.getRealItem() instanceof ChangedFileItem; if (activePaneHasPendingFilePatchItem) { await this.showFilePatchItem(selectedItem.filePath, this.state.selection.getActiveListKey(), { activate: false, @@ -762,11 +762,13 @@ export default class StagingView extends React.Component { const pendingItem = pane.getPendingItem(); if (!pendingItem || !pendingItem.getRealItem) { return false; } const realItem = pendingItem.getRealItem(); - const isDiffViewItem = realItem instanceof FilePatchItem; + if (!(realItem instanceof ChangedFileItem)) { + return false; + } // We only want to update pending diff views for currently active repo const isInActiveRepo = realItem.getWorkingDirectory() === this.props.workingDirectoryPath; const isStale = !this.changedFileExists(realItem.getFilePath(), realItem.getStagingStatus()); - return isDiffViewItem && isInActiveRepo && isStale; + return isInActiveRepo && isStale; }); } @@ -777,19 +779,19 @@ export default class StagingView extends React.Component { } async showFilePatchItem(filePath, stagingStatus, {activate, pane} = {activate: false}) { - const uri = FilePatchItem.buildURI(filePath, this.props.workingDirectoryPath, stagingStatus); - const filePatchItem = await this.props.workspace.open( + const uri = ChangedFileItem.buildURI(filePath, this.props.workingDirectoryPath, stagingStatus); + const changedFileItem = await this.props.workspace.open( uri, {pending: true, 
activatePane: activate, activateItem: activate, pane}, ); if (activate) { - const itemRoot = filePatchItem.getElement(); + const itemRoot = changedFileItem.getElement(); const focusRoot = itemRoot.querySelector('[tabIndex]'); if (focusRoot) { focusRoot.focus(); } } else { // simply make item visible - this.props.workspace.paneForItem(filePatchItem).activateItem(filePatchItem); + this.props.workspace.paneForItem(changedFileItem).activateItem(changedFileItem); } } diff --git a/lib/watch-workspace-item.js b/lib/watch-workspace-item.js new file mode 100644 index 00000000000..bff7714121a --- /dev/null +++ b/lib/watch-workspace-item.js @@ -0,0 +1,72 @@ +import {CompositeDisposable} from 'atom'; + +import URIPattern from './atom/uri-pattern'; + +class ItemWatcher { + constructor(workspace, pattern, component, stateKey) { + this.workspace = workspace; + this.pattern = pattern instanceof URIPattern ? pattern : new URIPattern(pattern); + this.component = component; + this.stateKey = stateKey; + + this.activeItem = this.isActiveItem(); + this.subs = new CompositeDisposable(); + } + + isActiveItem() { + for (const pane of this.workspace.getPanes()) { + if (this.itemMatches(pane.getActiveItem())) { + return true; + } + } + return false; + } + + setInitialState() { + if (!this.component.state) { + this.component.state = {}; + } + this.component.state[this.stateKey] = this.activeItem; + return this; + } + + subscribeToWorkspace() { + this.subs.dispose(); + this.subs = new CompositeDisposable( + this.workspace.getCenter().onDidChangeActivePaneItem(this.updateActiveState), + ); + return this; + } + + updateActiveState = () => { + const wasActive = this.activeItem; + + this.activeItem = this.isActiveItem(); + // Update the component's state if it's changed as a result + if (wasActive && !this.activeItem) { + return new Promise(resolve => this.component.setState({[this.stateKey]: false}, resolve)); + } else if (!wasActive && this.activeItem) { + return new Promise(resolve => 
this.component.setState({[this.stateKey]: true}, resolve)); + } else { + return Promise.resolve(); + } + } + + setPattern(pattern) { + this.pattern = pattern instanceof URIPattern ? pattern : new URIPattern(pattern); + + return this.updateActiveState(); + } + + itemMatches = item => item && item.getURI && this.pattern.matches(item.getURI()).ok() + + dispose() { + this.subs.dispose(); + } +} + +export function watchWorkspaceItem(workspace, pattern, component, stateKey) { + return new ItemWatcher(workspace, pattern, component, stateKey) + .setInitialState() + .subscribeToWorkspace(); +} diff --git a/menus/git.cson b/menus/git.cson index 2d7db071a87..5986cccf54e 100644 --- a/menus/git.cson +++ b/menus/git.cson @@ -34,8 +34,8 @@ 'context-menu': '.github-FilePatchListView-item': [ { - 'label': 'Open File' - 'command': 'github:open-file' + 'label': 'Jump to File' + 'command': 'github:jump-to-file' } ] '.github-FilePatchView': [ @@ -43,8 +43,8 @@ 'type': 'separator' } { - 'label': 'Open File' - 'command': 'github:open-file' + 'label': 'Jump to File' + 'command': 'github:jump-to-file' 'after': ['core:confirm'] } ] diff --git a/package-lock.json b/package-lock.json index be0d4afb5c9..fdce2d3396f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5128,7 +5128,7 @@ }, "load-json-file": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", + "resolved": "http://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", "integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=", "dev": true, "requires": { diff --git a/package.json b/package.json index 929cfa6585e..b6f52b0e0a5 100644 --- a/package.json +++ b/package.json @@ -197,6 +197,7 @@ "IssueishPaneItem": "createIssueishPaneItemStub", "GitDockItem": "createDockItemStub", "GithubDockItem": "createDockItemStub", - "FilePatchControllerStub": "createFilePatchControllerStub" + "FilePatchControllerStub": "createFilePatchControllerStub", + "CommitPreviewStub": 
"createCommitPreviewStub" } } diff --git a/styles/commit-view.less b/styles/commit-view.less index b97f49b6f7f..5151292b49c 100644 --- a/styles/commit-view.less +++ b/styles/commit-view.less @@ -79,6 +79,12 @@ } } + &-buttonWrapper { + align-items: center; + display: flex; + margin-bottom: 10px; + } + &-coAuthorEditor { position: relative; margin-top: @component-padding / 2; diff --git a/styles/file-patch-view.less b/styles/file-patch-view.less index c04a3445b02..3b4fec51bb5 100644 --- a/styles/file-patch-view.less +++ b/styles/file-patch-view.less @@ -2,8 +2,7 @@ @import "octicon-utf-codes"; @import "octicon-mixins"; -@hunk-fg-color: @text-color-subtle; -@hunk-bg-color: @pane-item-background-color; +@header-bg-color: mix(@syntax-text-color, @syntax-background-color, 6%); .github-FilePatchView { display: flex; @@ -24,14 +23,35 @@ padding: @component-padding; } + .github-FilePatchView-controlBlock { + padding: @component-padding*2 @component-padding @component-padding 0; + background-color: @syntax-background-color; + + & + .github-FilePatchView-controlBlock { + padding-top: 0; + } + } + + // Editor overrides + + atom-text-editor { + .selection .region { + background-color: mix(@button-background-color-selected, @syntax-background-color, 24%); + } + } + &-header { display: flex; justify-content: space-between; align-items: center; + margin-top: @component-padding*2; padding: @component-padding/2; padding-left: @component-padding; - border-bottom: 1px solid @base-border-color; - background-color: @pane-item-background-color; + border: 1px solid @base-border-color; + border-radius: @component-border-radius; + font-family: system-ui; + background-color: @header-bg-color; + cursor: default; .btn { font-size: .9em; @@ -92,14 +112,14 @@ &-metaHeader { display: flex; align-items: center; - padding: @component-padding; + padding: @component-padding / 2; background-color: @background-color-highlight; } &-metaTitle { flex: 1; margin: 0; - font-size: 1.25em; + font-size: 
1.0em; line-height: 1.5; overflow: hidden; text-overflow: ellipsis; @@ -161,10 +181,10 @@ &-line { // mixin .hunk-line-mixin(@bg;) { - background-color: fade(@bg, 18%); + background-color: fade(@bg, 16%); &.line.cursor-line { - background-color: fade(@bg, 28%); + background-color: fade(@bg, 22%); } } @@ -225,4 +245,22 @@ } } } + + // Inactive + + &--inactive .highlights .highlight.selection { + display: none; + } + + // Readonly editor + + atom-text-editor[readonly] { + .cursors { + display: none; + } + } +} + +.github-FilePatchHeaderView-basename { + font-weight: bold; } diff --git a/styles/hunk-header-view.less b/styles/hunk-header-view.less index a3f88c8ba0b..11cffdec766 100644 --- a/styles/hunk-header-view.less +++ b/styles/hunk-header-view.less @@ -1,7 +1,9 @@ @import "variables"; @hunk-fg-color: @text-color-subtle; -@hunk-bg-color: mix(@syntax-text-color, @syntax-background-color, 4%); +@hunk-bg-color: mix(@syntax-text-color, @syntax-background-color, 0%); +@hunk-bg-color-hover: mix(@syntax-text-color, @syntax-background-color, 4%); +@hunk-bg-color-active: mix(@syntax-text-color, @syntax-background-color, 2%); .github-HunkHeaderView { font-family: Menlo, Consolas, 'DejaVu Sans Mono', monospace; @@ -9,6 +11,8 @@ display: flex; align-items: stretch; font-size: .9em; + border: 1px solid @base-border-color; + border-radius: @component-border-radius; background-color: @hunk-bg-color; cursor: default; @@ -21,8 +25,8 @@ white-space: nowrap; text-overflow: ellipsis; -webkit-font-smoothing: antialiased; - &:hover { background-color: mix(@syntax-text-color, @syntax-background-color, 8%); } - &:active { background-color: mix(@syntax-text-color, @syntax-background-color, 2%); } + &:hover { background-color: @hunk-bg-color-hover; } + &:active { background-color: @hunk-bg-color-active; } } &-stageButton, @@ -32,10 +36,11 @@ padding-right: @component-padding; font-family: @font-family; border: none; + border-left: inherit; background-color: transparent; cursor: default; - 
&:hover { background-color: mix(@syntax-text-color, @syntax-background-color, 8%); } - &:active { background-color: mix(@syntax-text-color, @syntax-background-color, 2%); } + &:hover { background-color: @hunk-bg-color-hover; } + &:active { background-color: @hunk-bg-color-active; } .keystroke { margin-right: 1em; @@ -46,6 +51,7 @@ &-discardButton:before { text-align: left; width: auto; + vertical-align: 2px; } } @@ -57,14 +63,15 @@ &-title, &-stageButton, &-discardButton { - &:hover { background-color: mix(@syntax-text-color, @syntax-background-color, 8%); } - &:active { background-color: mix(@syntax-text-color, @syntax-background-color, 2%); } + &:hover { background-color: @hunk-bg-color-hover; } + &:active { background-color: @hunk-bg-color-active; } } } .github-HunkHeaderView--isSelected { color: contrast(@button-background-color-selected); background-color: @button-background-color-selected; + border-color: transparent; .github-HunkHeaderView-title { color: inherit; } @@ -75,19 +82,3 @@ &:active { background-color: darken(@button-background-color-selected, 4%); } } } - - -// Hacks ----------------------- -// Please unhack (one day TM) - -// Make the gap in the gutter also use the same background as .github-HunkHeaderView -// Note: This only works with the default font-size -.github-FilePatchView .line-number[style="margin-top: 30px;"]:before { - content: ""; - position: absolute; - left: 0; - right: 0; - top: -30px; - height: 30px; - background-color: @hunk-bg-color; -} diff --git a/test/atom/decoration.test.js b/test/atom/decoration.test.js index dcde9c44128..2e9fd4fdcd3 100644 --- a/test/atom/decoration.test.js +++ b/test/atom/decoration.test.js @@ -44,7 +44,7 @@ describe('Decoration', function() { it('creates a block decoration', function() { const app = ( - +
This is a subtree
@@ -55,7 +55,9 @@ describe('Decoration', function() { const args = editor.decorateMarker.firstCall.args; assert.equal(args[0], marker); assert.equal(args[1].type, 'block'); - const child = args[1].item.getElement().firstElementChild; + const element = args[1].item.getElement(); + assert.strictEqual(element.className, 'react-atom-decoration parent'); + const child = element.firstElementChild; assert.equal(child.className, 'decoration-subtree'); assert.equal(child.textContent, 'This is a subtree'); }); diff --git a/test/atom/uri-pattern.test.js b/test/atom/uri-pattern.test.js index 4cb77a52ade..a36f332401c 100644 --- a/test/atom/uri-pattern.test.js +++ b/test/atom/uri-pattern.test.js @@ -38,6 +38,14 @@ describe('URIPattern', function() { assert.isTrue(pattern.matches('proto://host/foo#exact').ok()); assert.isFalse(pattern.matches('proto://host/foo#nope').ok()); }); + + it('escapes and unescapes dashes', function() { + assert.isTrue( + new URIPattern('atom-github://with-many-dashes') + .matches('atom-github://with-many-dashes') + .ok(), + ); + }); }); describe('parameter placeholders', function() { diff --git a/test/builder/patch.js b/test/builder/patch.js new file mode 100644 index 00000000000..18ff5067b52 --- /dev/null +++ b/test/builder/patch.js @@ -0,0 +1,326 @@ +// Builders for classes related to MultiFilePatches. 
+ +import {TextBuffer} from 'atom'; +import MultiFilePatch from '../../lib/models/patch/multi-file-patch'; +import FilePatch from '../../lib/models/patch/file-patch'; +import File, {nullFile} from '../../lib/models/patch/file'; +import Patch from '../../lib/models/patch/patch'; +import Hunk from '../../lib/models/patch/hunk'; +import {Unchanged, Addition, Deletion, NoNewline} from '../../lib/models/patch/region'; + +class LayeredBuffer { + constructor() { + this.buffer = new TextBuffer(); + this.layers = ['patch', 'hunk', 'unchanged', 'addition', 'deletion', 'noNewline'].reduce((layers, name) => { + layers[name] = this.buffer.addMarkerLayer(); + return layers; + }, {}); + } + + getInsertionPoint() { + return this.buffer.getEndPosition(); + } + + getLayer(markerLayerName) { + const layer = this.layers[markerLayerName]; + if (!layer) { + throw new Error(`invalid marker layer name: ${markerLayerName}`); + } + return layer; + } + + appendMarked(markerLayerName, lines) { + const startPosition = this.buffer.getEndPosition(); + const layer = this.getLayer(markerLayerName); + this.buffer.append(lines.join('\n')); + const marker = layer.markRange([startPosition, this.buffer.getEndPosition()], {exclusive: true}); + this.buffer.append('\n'); + return marker; + } + + markFrom(markerLayerName, startPosition) { + const endPosition = this.buffer.getEndPosition().translate([-1, Infinity]); + const layer = this.getLayer(markerLayerName); + return layer.markRange([startPosition, endPosition], {exclusive: true}); + } + + wrapReturn(object) { + return { + buffer: this.buffer, + layers: this.layers, + ...object, + }; + } +} + +class MultiFilePatchBuilder { + constructor(layeredBuffer = null) { + this.layeredBuffer = layeredBuffer; + + this.filePatches = []; + } + + addFilePatch(block = () => {}) { + const filePatch = new FilePatchBuilder(this.layeredBuffer); + block(filePatch); + this.filePatches.push(filePatch.build().filePatch); + return this; + } + + build() { + return 
this.layeredBuffer.wrapReturn({ + multiFilePatch: new MultiFilePatch({ + buffer: this.layeredBuffer.buffer, + layers: this.layeredBuffer.layers, + filePatches: this.filePatches, + }), + }); + } +} + +class FilePatchBuilder { + constructor(layeredBuffer = null) { + this.layeredBuffer = layeredBuffer; + + this.oldFile = new File({path: 'file', mode: File.modes.NORMAL}); + this.newFile = null; + + this.patchBuilder = new PatchBuilder(this.layeredBuffer); + } + + setOldFile(block) { + const file = new FileBuilder(); + block(file); + this.oldFile = file.build().file; + return this; + } + + nullOldFile() { + this.oldFile = nullFile; + return this; + } + + setNewFile(block) { + const file = new FileBuilder(); + block(file); + this.newFile = file.build().file; + return this; + } + + nullNewFile() { + this.newFile = nullFile; + return this; + } + + status(...args) { + this.patchBuilder.status(...args); + return this; + } + + addHunk(...args) { + this.patchBuilder.addHunk(...args); + return this; + } + + empty() { + this.patchBuilder.empty(); + return this; + } + + build() { + const {patch} = this.patchBuilder.build(); + + if (this.newFile === null) { + this.newFile = this.oldFile.clone(); + } + + return this.layeredBuffer.wrapReturn({ + filePatch: new FilePatch(this.oldFile, this.newFile, patch), + }); + } +} + +class FileBuilder { + constructor() { + this._path = 'file.txt'; + this._mode = File.modes.NORMAL; + this._symlink = null; + } + + path(thePath) { + this._path = thePath; + return this; + } + + mode(theMode) { + this._mode = theMode; + return this; + } + + executable() { + return this.mode('100755'); + } + + symlinkTo(destinationPath) { + this._symlink = destinationPath; + return this.mode('120000'); + } + + build() { + return {file: new File({path: this._path, mode: this._mode, symlink: this._symlink})}; + } +} + +class PatchBuilder { + constructor(layeredBuffer = null) { + this.layeredBuffer = layeredBuffer; + + this._status = 'modified'; + this.hunks = []; + + 
this.patchStart = this.layeredBuffer.getInsertionPoint(); + this.drift = 0; + this.explicitlyEmpty = false; + } + + status(st) { + if (['modified', 'added', 'deleted'].indexOf(st) === -1) { + throw new Error(`Unrecognized status: ${st} (must be 'modified', 'added' or 'deleted')`); + } + + this._status = st; + return this; + } + + addHunk(block = () => {}) { + const builder = new HunkBuilder(this.layeredBuffer, this.drift); + block(builder); + const {hunk, drift} = builder.build(); + this.hunks.push(hunk); + this.drift = drift; + return this; + } + + empty() { + this.explicitlyEmpty = true; + return this; + } + + build() { + if (this.hunks.length === 0 && !this.explicitlyEmpty) { + if (this._status === 'modified') { + this.addHunk(hunk => hunk.oldRow(1).unchanged('0000').added('0001').deleted('0002').unchanged('0003')); + this.addHunk(hunk => hunk.oldRow(10).unchanged('0004').added('0005').deleted('0006').unchanged('0007')); + } else if (this._status === 'added') { + this.addHunk(hunk => hunk.oldRow(1).added('0000', '0001', '0002', '0003')); + } else if (this._status === 'deleted') { + this.addHunk(hunk => hunk.oldRow(1).deleted('0000', '0001', '0002', '0003')); + } + } + + const marker = this.layeredBuffer.markFrom('patch', this.patchStart); + + return this.layeredBuffer.wrapReturn({ + patch: new Patch({status: this._status, hunks: this.hunks, marker}), + }); + } +} + +class HunkBuilder { + constructor(layeredBuffer = null, drift = 0) { + this.layeredBuffer = layeredBuffer; + this.drift = drift; + + this.oldStartRow = 0; + this.oldRowCount = null; + this.newStartRow = null; + this.newRowCount = null; + + this.sectionHeading = "don't care"; + + this.hunkStartPoint = this.layeredBuffer.getInsertionPoint(); + this.regions = []; + } + + oldRow(rowNumber) { + this.oldStartRow = rowNumber; + return this; + } + + unchanged(...lines) { + this.regions.push(new Unchanged(this.layeredBuffer.appendMarked('unchanged', lines))); + return this; + } + + added(...lines) { + 
this.regions.push(new Addition(this.layeredBuffer.appendMarked('addition', lines))); + return this; + } + + deleted(...lines) { + this.regions.push(new Deletion(this.layeredBuffer.appendMarked('deletion', lines))); + return this; + } + + noNewline() { + this.regions.push(new NoNewline(this.layeredBuffer.appendMarked('noNewline', [' No newline at end of file']))); + return this; + } + + build() { + if (this.regions.length === 0) { + this.unchanged('0000').added('0001').deleted('0002').unchanged('0003'); + } + + if (this.oldRowCount === null) { + this.oldRowCount = this.regions.reduce((count, region) => region.when({ + unchanged: () => count + region.bufferRowCount(), + deletion: () => count + region.bufferRowCount(), + default: () => count, + }), 0); + } + + if (this.newStartRow === null) { + this.newStartRow = this.oldStartRow + this.drift; + } + + if (this.newRowCount === null) { + this.newRowCount = this.regions.reduce((count, region) => region.when({ + unchanged: () => count + region.bufferRowCount(), + addition: () => count + region.bufferRowCount(), + default: () => count, + }), 0); + } + + const marker = this.layeredBuffer.markFrom('hunk', this.hunkStartPoint); + + return this.layeredBuffer.wrapReturn({ + hunk: new Hunk({ + oldStartRow: this.oldStartRow, + oldRowCount: this.oldRowCount, + newStartRow: this.newStartRow, + newRowCount: this.newRowCount, + sectionHeading: this.sectionHeading, + marker, + regions: this.regions, + }), + drift: this.drift + this.newRowCount - this.oldRowCount, + }); + } +} + +export function multiFilePatchBuilder() { + return new MultiFilePatchBuilder(new LayeredBuffer()); +} + +export function filePatchBuilder() { + return new FilePatchBuilder(new LayeredBuffer()); +} + +export function patchBuilder() { + return new PatchBuilder(new LayeredBuffer()); +} + +export function hunkBuilder() { + return new HunkBuilder(new LayeredBuffer()); +} diff --git a/test/containers/file-patch-container.test.js 
b/test/containers/changed-file-container.test.js similarity index 67% rename from test/containers/file-patch-container.test.js rename to test/containers/changed-file-container.test.js index a4e12a65f58..dd8fbf01f7e 100644 --- a/test/containers/file-patch-container.test.js +++ b/test/containers/changed-file-container.test.js @@ -3,10 +3,11 @@ import fs from 'fs-extra'; import React from 'react'; import {mount} from 'enzyme'; -import FilePatchContainer from '../../lib/containers/file-patch-container'; +import ChangedFileContainer from '../../lib/containers/changed-file-container'; +import ChangedFileItem from '../../lib/items/changed-file-item'; import {cloneRepository, buildRepository} from '../helpers'; -describe('FilePatchContainer', function() { +describe('ChangedFileContainer', function() { let atomEnv, repository; beforeEach(async function() { @@ -34,19 +35,23 @@ describe('FilePatchContainer', function() { repository, stagingStatus: 'unstaged', relPath: 'a.txt', + itemType: ChangedFileItem, + workspace: atomEnv.workspace, commands: atomEnv.commands, keymaps: atomEnv.keymaps, tooltips: atomEnv.tooltips, config: atomEnv.config, + discardLines: () => {}, undoLastDiscard: () => {}, surfaceFileAtPath: () => {}, destroy: () => {}, + ...overrideProps, }; - return ; + return ; } it('renders a loading spinner before file patch data arrives', function() { @@ -54,45 +59,45 @@ describe('FilePatchContainer', function() { assert.isTrue(wrapper.find('LoadingView').exists()); }); - it('renders a FilePatchView', async function() { + it('renders a ChangedFileController', async function() { const wrapper = mount(buildApp({relPath: 'a.txt', stagingStatus: 'unstaged'})); - await assert.async.isTrue(wrapper.update().find('FilePatchView').exists()); + await assert.async.isTrue(wrapper.update().find('ChangedFileController').exists()); }); it('adopts the buffer from the previous FilePatch when a new one arrives', async function() { const wrapper = mount(buildApp({relPath: 'a.txt', 
stagingStatus: 'unstaged'})); - await assert.async.isTrue(wrapper.update().find('FilePatchController').exists()); + await assert.async.isTrue(wrapper.update().find('ChangedFileController').exists()); - const prevPatch = wrapper.find('FilePatchController').prop('filePatch'); + const prevPatch = wrapper.find('ChangedFileController').prop('multiFilePatch'); const prevBuffer = prevPatch.getBuffer(); await fs.writeFile(path.join(repository.getWorkingDirectoryPath(), 'a.txt'), 'changed\nagain\n'); repository.refresh(); - await assert.async.notStrictEqual(wrapper.update().find('FilePatchController').prop('filePatch'), prevPatch); + await assert.async.notStrictEqual(wrapper.update().find('ChangedFileController').prop('multiFilePatch'), prevPatch); - const nextBuffer = wrapper.find('FilePatchController').prop('filePatch').getBuffer(); + const nextBuffer = wrapper.find('ChangedFileController').prop('multiFilePatch').getBuffer(); assert.strictEqual(nextBuffer, prevBuffer); }); it('does not adopt a buffer from an unchanged patch', async function() { const wrapper = mount(buildApp({relPath: 'a.txt', stagingStatus: 'unstaged'})); - await assert.async.isTrue(wrapper.update().find('FilePatchController').exists()); + await assert.async.isTrue(wrapper.update().find('ChangedFileController').exists()); - const prevPatch = wrapper.find('FilePatchController').prop('filePatch'); + const prevPatch = wrapper.find('ChangedFileController').prop('multiFilePatch'); sinon.spy(prevPatch, 'adoptBufferFrom'); wrapper.setProps({}); assert.isFalse(prevPatch.adoptBufferFrom.called); - const nextPatch = wrapper.find('FilePatchController').prop('filePatch'); + const nextPatch = wrapper.find('ChangedFileController').prop('multiFilePatch'); assert.strictEqual(nextPatch, prevPatch); }); it('passes unrecognized props to the FilePatchView', async function() { const extra = Symbol('extra'); const wrapper = mount(buildApp({relPath: 'a.txt', stagingStatus: 'unstaged', extra})); - await 
assert.async.strictEqual(wrapper.update().find('FilePatchView').prop('extra'), extra); + await assert.async.strictEqual(wrapper.update().find('MultiFilePatchView').prop('extra'), extra); }); }); diff --git a/test/containers/commit-preview-container.test.js b/test/containers/commit-preview-container.test.js new file mode 100644 index 00000000000..c11a5a91646 --- /dev/null +++ b/test/containers/commit-preview-container.test.js @@ -0,0 +1,74 @@ +import React from 'react'; +import {mount} from 'enzyme'; + +import CommitPreviewContainer from '../../lib/containers/commit-preview-container'; +import CommitPreviewItem from '../../lib/items/commit-preview-item'; +import {cloneRepository, buildRepository} from '../helpers'; + +describe('CommitPreviewContainer', function() { + let atomEnv, repository; + + beforeEach(async function() { + atomEnv = global.buildAtomEnvironment(); + + const workdir = await cloneRepository(); + repository = await buildRepository(workdir); + }); + + afterEach(function() { + atomEnv.destroy(); + }); + + function buildApp(override = {}) { + + const props = { + repository, + itemType: CommitPreviewItem, + + workspace: atomEnv.workspace, + commands: atomEnv.commands, + keymaps: atomEnv.keymaps, + tooltips: atomEnv.tooltips, + config: atomEnv.config, + + destroy: () => {}, + discardLines: () => {}, + undoLastDiscard: () => {}, + surfaceToCommitPreviewButton: () => {}, + + ...override, + }; + + return ; + } + + it('renders a loading spinner while the repository is loading', function() { + const wrapper = mount(buildApp()); + assert.isTrue(wrapper.find('LoadingView').exists()); + }); + + it('renders a loading spinner while the file patch is being loaded', async function() { + await repository.getLoadPromise(); + const patchPromise = repository.getStagedChangesPatch(); + let resolveDelayedPromise = () => {}; + const delayedPromise = new Promise(resolve => { + resolveDelayedPromise = resolve; + }); + sinon.stub(repository, 
'getStagedChangesPatch').returns(delayedPromise); + + const wrapper = mount(buildApp()); + + assert.isTrue(wrapper.find('LoadingView').exists()); + resolveDelayedPromise(patchPromise); + await assert.async.isFalse(wrapper.update().find('LoadingView').exists()); + }); + + it('renders a CommitPreviewController once the file patch is loaded', async function() { + await repository.getLoadPromise(); + const patch = await repository.getStagedChangesPatch(); + + const wrapper = mount(buildApp()); + await assert.async.isTrue(wrapper.update().find('CommitPreviewController').exists()); + assert.strictEqual(wrapper.find('CommitPreviewController').prop('multiFilePatch'), patch); + }); +}); diff --git a/test/controllers/changed-file-controller.test.js b/test/controllers/changed-file-controller.test.js new file mode 100644 index 00000000000..6247764cdf3 --- /dev/null +++ b/test/controllers/changed-file-controller.test.js @@ -0,0 +1,59 @@ +import React from 'react'; +import {shallow} from 'enzyme'; + +import ChangedFileController from '../../lib/controllers/changed-file-controller'; +import {cloneRepository, buildRepository} from '../helpers'; + +describe('ChangedFileController', function() { + let atomEnv, repository; + + beforeEach(async function() { + atomEnv = global.buildAtomEnvironment(); + repository = await buildRepository(await cloneRepository('three-files')); + }); + + afterEach(function() { + atomEnv.destroy(); + }); + + function buildApp(override = {}) { + const props = { + repository, + stagingStatus: 'unstaged', + relPath: 'file.txt', + + workspace: atomEnv.workspace, + commands: atomEnv.commands, + keymaps: atomEnv.keymaps, + tooltips: atomEnv.tooltips, + config: atomEnv.config, + + destroy: () => {}, + undoLastDiscard: () => {}, + surfaceFileAtPath: () => {}, + + ...override, + }; + + return ; + } + + it('passes unrecognized props to a MultiFilePatchController', function() { + const extra = Symbol('extra'); + const wrapper = shallow(buildApp({extra})); + + 
assert.strictEqual(wrapper.find('MultiFilePatchController').prop('extra'), extra); + }); + + it('calls surfaceFileAtPath with fixed arguments', function() { + const surfaceFileAtPath = sinon.spy(); + const wrapper = shallow(buildApp({ + relPath: 'whatever.js', + stagingStatus: 'staged', + surfaceFileAtPath, + })); + wrapper.find('MultiFilePatchController').prop('surface')(); + + assert.isTrue(surfaceFileAtPath.calledWith('whatever.js', 'staged')); + }); +}); diff --git a/test/controllers/commit-controller.test.js b/test/controllers/commit-controller.test.js index b73caf53630..553c43b8e02 100644 --- a/test/controllers/commit-controller.test.js +++ b/test/controllers/commit-controller.test.js @@ -6,8 +6,10 @@ import {shallow, mount} from 'enzyme'; import Commit from '../../lib/models/commit'; import {nullBranch} from '../../lib/models/branch'; import UserStore from '../../lib/models/user-store'; +import URIPattern from '../../lib/atom/uri-pattern'; import CommitController, {COMMIT_GRAMMAR_SCOPE} from '../../lib/controllers/commit-controller'; +import CommitPreviewItem from '../../lib/items/commit-preview-item'; import {cloneRepository, buildRepository, buildRepositoryWithPipeline} from '../helpers'; import * as reporterProxy from '../../lib/reporter-proxy'; @@ -28,6 +30,23 @@ describe('CommitController', function() { const noop = () => {}; const store = new UserStore({config}); + // Ensure the Workspace doesn't mangle atom-github://... URIs. + // If you don't have an opener registered for a non-standard URI protocol, the Workspace coerces it into a file URI + // and tries to open it with a TextEditor. In the process, the URI gets mangled: + // + // atom.workspace.open('atom-github://unknown/whatever').then(item => console.log(item.getURI())) + // > 'atom-github:/unknown/whatever' + // + // Adding an opener that creates fake items prevents it from doing this and keeps the URIs unchanged. 
+ const pattern = new URIPattern(CommitPreviewItem.uriPattern); + workspace.addOpener(uri => { + if (pattern.matches(uri).ok()) { + return {getURI() { return uri; }}; + } else { + return undefined; + } + }); + app = ( i.getURI()), previewURI); + assert.notStrictEqual(workspace.getActivePaneItem().getURI(), previewURI); + assert.isFalse(wrapper.find('CommitView').prop('commitPreviewActive')); + + await wrapper.find('CommitView').prop('toggleCommitPreview')(); + + // Open as active pane item again + assert.strictEqual(workspace.getActivePaneItem().getURI(), previewURI); + assert.strictEqual(workspace.getActivePaneItem(), workspace.paneForURI(previewURI).getPendingItem()); + assert.isTrue(wrapper.find('CommitView').prop('commitPreviewActive')); + + await wrapper.find('CommitView').prop('toggleCommitPreview')(); + + // Commit preview closed + assert.notInclude(workspace.getPaneItems().map(i => i.getURI()), previewURI); + assert.isFalse(wrapper.find('CommitView').prop('commitPreviewActive')); + }); + + it('records a metrics event when pane is toggled', async function() { + sinon.stub(reporterProxy, 'addEvent'); + const workdir = await cloneRepository('three-files'); + const repository = await buildRepository(workdir); + + const wrapper = shallow(React.cloneElement(app, {repository})); + + assert.isFalse(reporterProxy.addEvent.called); + + await wrapper.instance().toggleCommitPreview(); + + assert.isTrue(reporterProxy.addEvent.calledOnceWithExactly('toggle-commit-preview', {package: 'github'})); }); + + it('toggles the commit preview pane for the active repository', async function() { + const workdir0 = await cloneRepository('three-files'); + const repository0 = await buildRepository(workdir0); + + const workdir1 = await cloneRepository('three-files'); + const repository1 = await buildRepository(workdir1); + + const wrapper = shallow(React.cloneElement(app, {repository: repository0})); + + assert.isFalse(wrapper.find('CommitView').prop('commitPreviewActive')); + + await 
wrapper.find('CommitView').prop('toggleCommitPreview')(); + assert.isTrue(workspace.getPaneItems().some(item => item.getURI() === CommitPreviewItem.buildURI(workdir0))); + assert.isFalse(workspace.getPaneItems().some(item => item.getURI() === CommitPreviewItem.buildURI(workdir1))); + assert.isTrue(wrapper.find('CommitView').prop('commitPreviewActive')); + + wrapper.setProps({repository: repository1}); + assert.isTrue(workspace.getPaneItems().some(item => item.getURI() === CommitPreviewItem.buildURI(workdir0))); + assert.isFalse(workspace.getPaneItems().some(item => item.getURI() === CommitPreviewItem.buildURI(workdir1))); + assert.isFalse(wrapper.find('CommitView').prop('commitPreviewActive')); + + await wrapper.find('CommitView').prop('toggleCommitPreview')(); + assert.isFalse(workspace.getPaneItems().some(item => item.getURI() === CommitPreviewItem.buildURI(workdir0))); + assert.isTrue(workspace.getPaneItems().some(item => item.getURI() === CommitPreviewItem.buildURI(workdir1))); + assert.isTrue(wrapper.find('CommitView').prop('commitPreviewActive')); + + await wrapper.find('CommitView').prop('toggleCommitPreview')(); + assert.isFalse(workspace.getPaneItems().some(item => item.getURI() === CommitPreviewItem.buildURI(workdir0))); + assert.isFalse(workspace.getPaneItems().some(item => item.getURI() === CommitPreviewItem.buildURI(workdir1))); + assert.isFalse(wrapper.find('CommitView').prop('commitPreviewActive')); + }); + }); + + it('unconditionally activates the commit preview item', async function() { + const workdir = await cloneRepository('three-files'); + const repository = await buildRepository(workdir); + const previewURI = CommitPreviewItem.buildURI(workdir); + + const wrapper = shallow(React.cloneElement(app, {repository})); + + await wrapper.find('CommitView').prop('activateCommitPreview')(); + assert.strictEqual(workspace.getActivePaneItem().getURI(), previewURI); + + await workspace.open(__filename); + 
assert.notStrictEqual(workspace.getActivePaneItem().getURI(), previewURI); + + await wrapper.find('CommitView').prop('activateCommitPreview')(); + assert.strictEqual(workspace.getActivePaneItem().getURI(), previewURI); }); }); diff --git a/test/controllers/commit-preview-controller.test.js b/test/controllers/commit-preview-controller.test.js new file mode 100644 index 00000000000..f4c6944bae6 --- /dev/null +++ b/test/controllers/commit-preview-controller.test.js @@ -0,0 +1,57 @@ +import React from 'react'; +import {shallow} from 'enzyme'; + +import CommitPreviewController from '../../lib/controllers/commit-preview-controller'; +import MultiFilePatch from '../../lib/models/patch/multi-file-patch'; +import {cloneRepository, buildRepository} from '../helpers'; + +describe('CommitPreviewController', function() { + let atomEnv, repository; + + beforeEach(async function() { + atomEnv = global.buildAtomEnvironment(); + repository = await buildRepository(await cloneRepository('three-files')); + }); + + afterEach(function() { + atomEnv.destroy(); + }); + + function buildApp(override = {}) { + const props = { + repository, + stagingStatus: 'unstaged', + multiFilePatch: new MultiFilePatch({}), + + workspace: atomEnv.workspace, + commands: atomEnv.commands, + keymaps: atomEnv.keymaps, + tooltips: atomEnv.tooltips, + config: atomEnv.config, + + destroy: () => {}, + discardLines: () => {}, + undoLastDiscard: () => {}, + surfaceToCommitPreviewButton: () => {}, + + ...override, + }; + + return ; + } + + it('passes unrecognized props to a MultiFilePatchController', function() { + const extra = Symbol('extra'); + const wrapper = shallow(buildApp({extra})); + + assert.strictEqual(wrapper.find('MultiFilePatchController').prop('extra'), extra); + }); + + it('calls surfaceToCommitPreviewButton', function() { + const surfaceToCommitPreviewButton = sinon.spy(); + const wrapper = shallow(buildApp({surfaceToCommitPreviewButton})); + 
wrapper.find('MultiFilePatchController').prop('surface')(); + + assert.isTrue(surfaceToCommitPreviewButton.called); + }); +}); diff --git a/test/controllers/file-patch-controller.test.js b/test/controllers/file-patch-controller.test.js deleted file mode 100644 index 2ea7d2d90e3..00000000000 --- a/test/controllers/file-patch-controller.test.js +++ /dev/null @@ -1,460 +0,0 @@ -import path from 'path'; -import fs from 'fs-extra'; -import React from 'react'; -import {shallow} from 'enzyme'; - -import FilePatchController from '../../lib/controllers/file-patch-controller'; -import FilePatch from '../../lib/models/patch/file-patch'; -import * as reporterProxy from '../../lib/reporter-proxy'; -import {cloneRepository, buildRepository} from '../helpers'; - -describe('FilePatchController', function() { - let atomEnv, repository, filePatch; - - beforeEach(async function() { - atomEnv = global.buildAtomEnvironment(); - - const workdirPath = await cloneRepository(); - repository = await buildRepository(workdirPath); - - // a.txt: unstaged changes - await fs.writeFile(path.join(workdirPath, 'a.txt'), '00\n01\n02\n03\n04\n05\n06'); - - filePatch = await repository.getFilePatchForPath('a.txt', {staged: false}); - }); - - afterEach(function() { - atomEnv.destroy(); - }); - - function buildApp(overrideProps = {}) { - const props = { - repository, - stagingStatus: 'unstaged', - relPath: 'a.txt', - isPartiallyStaged: false, - filePatch, - hasUndoHistory: false, - workspace: atomEnv.workspace, - commands: atomEnv.commands, - keymaps: atomEnv.keymaps, - tooltips: atomEnv.tooltips, - config: atomEnv.config, - destroy: () => {}, - discardLines: () => {}, - undoLastDiscard: () => {}, - surfaceFileAtPath: () => {}, - ...overrideProps, - }; - - return ; - } - - it('passes extra props to the FilePatchView', function() { - const extra = Symbol('extra'); - const wrapper = shallow(buildApp({extra})); - - assert.strictEqual(wrapper.find('FilePatchView').prop('extra'), extra); - }); - - it('calls 
undoLastDiscard through with set arguments', function() { - const undoLastDiscard = sinon.spy(); - const wrapper = shallow(buildApp({relPath: 'b.txt', undoLastDiscard})); - wrapper.find('FilePatchView').prop('undoLastDiscard')(); - - assert.isTrue(undoLastDiscard.calledWith('b.txt', repository)); - }); - - it('calls surfaceFileAtPath with set arguments', function() { - const surfaceFileAtPath = sinon.spy(); - const wrapper = shallow(buildApp({relPath: 'c.txt', surfaceFileAtPath})); - wrapper.find('FilePatchView').prop('surfaceFile')(); - - assert.isTrue(surfaceFileAtPath.calledWith('c.txt', 'unstaged')); - }); - - describe('diveIntoMirrorPatch()', function() { - it('destroys the current pane and opens the staged changes', async function() { - const destroy = sinon.spy(); - sinon.stub(atomEnv.workspace, 'open').resolves(); - const wrapper = shallow(buildApp({relPath: 'c.txt', stagingStatus: 'unstaged', destroy})); - - await wrapper.find('FilePatchView').prop('diveIntoMirrorPatch')(); - - assert.isTrue(destroy.called); - assert.isTrue(atomEnv.workspace.open.calledWith( - 'atom-github://file-patch/c.txt' + - `?workdir=${encodeURIComponent(repository.getWorkingDirectoryPath())}&stagingStatus=staged`, - )); - }); - - it('destroys the current pane and opens the unstaged changes', async function() { - const destroy = sinon.spy(); - sinon.stub(atomEnv.workspace, 'open').resolves(); - const wrapper = shallow(buildApp({relPath: 'd.txt', stagingStatus: 'staged', destroy})); - - await wrapper.find('FilePatchView').prop('diveIntoMirrorPatch')(); - - assert.isTrue(destroy.called); - assert.isTrue(atomEnv.workspace.open.calledWith( - 'atom-github://file-patch/d.txt' + - `?workdir=${encodeURIComponent(repository.getWorkingDirectoryPath())}&stagingStatus=unstaged`, - )); - }); - }); - - describe('openFile()', function() { - it('opens an editor on the current file', async function() { - const wrapper = shallow(buildApp({relPath: 'a.txt', stagingStatus: 'unstaged'})); - const editor 
= await wrapper.find('FilePatchView').prop('openFile')([]); - - assert.strictEqual(editor.getPath(), path.join(repository.getWorkingDirectoryPath(), 'a.txt')); - }); - - it('sets the cursor to a single position', async function() { - const wrapper = shallow(buildApp({relPath: 'a.txt', stagingStatus: 'unstaged'})); - const editor = await wrapper.find('FilePatchView').prop('openFile')([[1, 1]]); - - assert.deepEqual(editor.getCursorBufferPositions().map(p => p.serialize()), [[1, 1]]); - }); - - it('adds cursors at a set of positions', async function() { - const wrapper = shallow(buildApp({relPath: 'a.txt', stagingStatus: 'unstaged'})); - const editor = await wrapper.find('FilePatchView').prop('openFile')([[1, 1], [3, 1], [5, 0]]); - - assert.deepEqual(editor.getCursorBufferPositions().map(p => p.serialize()), [[1, 1], [3, 1], [5, 0]]); - }); - }); - - describe('toggleFile()', function() { - it('stages the current file if unstaged', async function() { - sinon.spy(repository, 'stageFiles'); - const wrapper = shallow(buildApp({relPath: 'a.txt', stagingStatus: 'unstaged'})); - - await wrapper.find('FilePatchView').prop('toggleFile')(); - - assert.isTrue(repository.stageFiles.calledWith(['a.txt'])); - }); - - it('unstages the current file if staged', async function() { - sinon.spy(repository, 'unstageFiles'); - const wrapper = shallow(buildApp({relPath: 'a.txt', stagingStatus: 'staged'})); - - await wrapper.find('FilePatchView').prop('toggleFile')(); - - assert.isTrue(repository.unstageFiles.calledWith(['a.txt'])); - }); - - it('is a no-op if a staging operation is already in progress', async function() { - sinon.stub(repository, 'stageFiles').resolves('staged'); - sinon.stub(repository, 'unstageFiles').resolves('unstaged'); - - const wrapper = shallow(buildApp({relPath: 'a.txt', stagingStatus: 'unstaged'})); - assert.strictEqual(await wrapper.find('FilePatchView').prop('toggleFile')(), 'staged'); - - // No-op - assert.isNull(await 
wrapper.find('FilePatchView').prop('toggleFile')()); - - // Simulate an identical patch arriving too soon - wrapper.setProps({filePatch: filePatch.clone()}); - - // Still a no-op - assert.isNull(await wrapper.find('FilePatchView').prop('toggleFile')()); - - // Simulate updated patch arrival - const promise = wrapper.instance().patchChangePromise; - wrapper.setProps({filePatch: FilePatch.createNull()}); - await promise; - - // Performs an operation again - assert.strictEqual(await wrapper.find('FilePatchView').prop('toggleFile')(), 'staged'); - }); - }); - - describe('selected row and selection mode tracking', function() { - it('captures the selected row set', function() { - const wrapper = shallow(buildApp()); - assert.sameMembers(Array.from(wrapper.find('FilePatchView').prop('selectedRows')), []); - assert.strictEqual(wrapper.find('FilePatchView').prop('selectionMode'), 'hunk'); - - wrapper.find('FilePatchView').prop('selectedRowsChanged')(new Set([1, 2]), 'line'); - assert.sameMembers(Array.from(wrapper.find('FilePatchView').prop('selectedRows')), [1, 2]); - assert.strictEqual(wrapper.find('FilePatchView').prop('selectionMode'), 'line'); - }); - - it('does not re-render if the row set and selection mode are unchanged', function() { - const wrapper = shallow(buildApp()); - assert.sameMembers(Array.from(wrapper.find('FilePatchView').prop('selectedRows')), []); - assert.strictEqual(wrapper.find('FilePatchView').prop('selectionMode'), 'hunk'); - - sinon.spy(wrapper.instance(), 'render'); - - wrapper.find('FilePatchView').prop('selectedRowsChanged')(new Set([1, 2]), 'line'); - - assert.isTrue(wrapper.instance().render.called); - assert.sameMembers(Array.from(wrapper.find('FilePatchView').prop('selectedRows')), [1, 2]); - assert.strictEqual(wrapper.find('FilePatchView').prop('selectionMode'), 'line'); - - wrapper.instance().render.resetHistory(); - wrapper.find('FilePatchView').prop('selectedRowsChanged')(new Set([2, 1]), 'line'); - - 
assert.sameMembers(Array.from(wrapper.find('FilePatchView').prop('selectedRows')), [1, 2]); - assert.strictEqual(wrapper.find('FilePatchView').prop('selectionMode'), 'line'); - assert.isFalse(wrapper.instance().render.called); - - wrapper.instance().render.resetHistory(); - wrapper.find('FilePatchView').prop('selectedRowsChanged')(new Set([1, 2]), 'hunk'); - - assert.sameMembers(Array.from(wrapper.find('FilePatchView').prop('selectedRows')), [1, 2]); - assert.strictEqual(wrapper.find('FilePatchView').prop('selectionMode'), 'hunk'); - assert.isTrue(wrapper.instance().render.called); - }); - - describe('discardLines()', function() { - it('records an event', async function() { - const wrapper = shallow(buildApp()); - sinon.stub(reporterProxy, 'addEvent'); - await wrapper.find('FilePatchView').prop('discardRows')(new Set([1, 2])); - assert.isTrue(reporterProxy.addEvent.calledWith('discard-unstaged-changes', { - package: 'github', - component: 'FilePatchController', - lineCount: 2, - eventSource: undefined, - })); - }); - }); - - describe('undoLastDiscard()', function() { - it('records an event', function() { - const wrapper = shallow(buildApp()); - sinon.stub(reporterProxy, 'addEvent'); - wrapper.find('FilePatchView').prop('undoLastDiscard')(); - assert.isTrue(reporterProxy.addEvent.calledWith('undo-last-discard', { - package: 'github', - component: 'FilePatchController', - eventSource: undefined, - })); - }); - }); - }); - - describe('toggleRows()', function() { - it('is a no-op with no selected rows', async function() { - const wrapper = shallow(buildApp()); - - sinon.spy(repository, 'applyPatchToIndex'); - - await wrapper.find('FilePatchView').prop('toggleRows')(); - assert.isFalse(repository.applyPatchToIndex.called); - }); - - it('applies a stage patch to the index', async function() { - const wrapper = shallow(buildApp()); - wrapper.find('FilePatchView').prop('selectedRowsChanged')(new Set([1])); - - sinon.spy(filePatch, 'getStagePatchForLines'); - 
sinon.spy(repository, 'applyPatchToIndex'); - - await wrapper.find('FilePatchView').prop('toggleRows')(); - - assert.sameMembers(Array.from(filePatch.getStagePatchForLines.lastCall.args[0]), [1]); - assert.isTrue(repository.applyPatchToIndex.calledWith(filePatch.getStagePatchForLines.returnValues[0])); - }); - - it('toggles a different row set if provided', async function() { - const wrapper = shallow(buildApp()); - wrapper.find('FilePatchView').prop('selectedRowsChanged')(new Set([1]), 'line'); - - sinon.spy(filePatch, 'getStagePatchForLines'); - sinon.spy(repository, 'applyPatchToIndex'); - - await wrapper.find('FilePatchView').prop('toggleRows')(new Set([2]), 'hunk'); - - assert.sameMembers(Array.from(filePatch.getStagePatchForLines.lastCall.args[0]), [2]); - assert.isTrue(repository.applyPatchToIndex.calledWith(filePatch.getStagePatchForLines.returnValues[0])); - - assert.sameMembers(Array.from(wrapper.find('FilePatchView').prop('selectedRows')), [2]); - assert.strictEqual(wrapper.find('FilePatchView').prop('selectionMode'), 'hunk'); - }); - - it('applies an unstage patch to the index', async function() { - await repository.stageFiles(['a.txt']); - const otherPatch = await repository.getFilePatchForPath('a.txt', {staged: true}); - const wrapper = shallow(buildApp({filePatch: otherPatch, stagingStatus: 'staged'})); - wrapper.find('FilePatchView').prop('selectedRowsChanged')(new Set([2])); - - sinon.spy(otherPatch, 'getUnstagePatchForLines'); - sinon.spy(repository, 'applyPatchToIndex'); - - await wrapper.find('FilePatchView').prop('toggleRows')(); - - assert.sameMembers(Array.from(otherPatch.getUnstagePatchForLines.lastCall.args[0]), [2]); - assert.isTrue(repository.applyPatchToIndex.calledWith(otherPatch.getUnstagePatchForLines.returnValues[0])); - }); - }); - - if (process.platform !== 'win32') { - describe('toggleModeChange()', function() { - it("it stages an unstaged file's new mode", async function() { - const p = 
path.join(repository.getWorkingDirectoryPath(), 'a.txt'); - await fs.chmod(p, 0o755); - repository.refresh(); - const newFilePatch = await repository.getFilePatchForPath('a.txt', {staged: false}); - - const wrapper = shallow(buildApp({filePatch: newFilePatch, stagingStatus: 'unstaged'})); - - sinon.spy(repository, 'stageFileModeChange'); - await wrapper.find('FilePatchView').prop('toggleModeChange')(); - - assert.isTrue(repository.stageFileModeChange.calledWith('a.txt', '100755')); - }); - - it("it stages a staged file's old mode", async function() { - const p = path.join(repository.getWorkingDirectoryPath(), 'a.txt'); - await fs.chmod(p, 0o755); - await repository.stageFiles(['a.txt']); - repository.refresh(); - const newFilePatch = await repository.getFilePatchForPath('a.txt', {staged: true}); - - const wrapper = shallow(buildApp({filePatch: newFilePatch, stagingStatus: 'staged'})); - - sinon.spy(repository, 'stageFileModeChange'); - await wrapper.find('FilePatchView').prop('toggleModeChange')(); - - assert.isTrue(repository.stageFileModeChange.calledWith('a.txt', '100644')); - }); - }); - - describe('toggleSymlinkChange', function() { - it('handles an addition and typechange with a special repository method', async function() { - if (process.env.ATOM_GITHUB_SKIP_SYMLINKS) { - this.skip(); - return; - } - - const p = path.join(repository.getWorkingDirectoryPath(), 'waslink.txt'); - const dest = path.join(repository.getWorkingDirectoryPath(), 'destination'); - await fs.writeFile(dest, 'asdf\n', 'utf8'); - await fs.symlink(dest, p); - - await repository.stageFiles(['waslink.txt', 'destination']); - await repository.commit('zero'); - - await fs.unlink(p); - await fs.writeFile(p, 'fdsa\n', 'utf8'); - - repository.refresh(); - const symlinkPatch = await repository.getFilePatchForPath('waslink.txt', {staged: false}); - const wrapper = shallow(buildApp({filePatch: symlinkPatch, relPath: 'waslink.txt', stagingStatus: 'unstaged'})); - - sinon.spy(repository, 
'stageFileSymlinkChange'); - - await wrapper.find('FilePatchView').prop('toggleSymlinkChange')(); - - assert.isTrue(repository.stageFileSymlinkChange.calledWith('waslink.txt')); - }); - - it('stages non-addition typechanges normally', async function() { - if (process.env.ATOM_GITHUB_SKIP_SYMLINKS) { - this.skip(); - return; - } - - const p = path.join(repository.getWorkingDirectoryPath(), 'waslink.txt'); - const dest = path.join(repository.getWorkingDirectoryPath(), 'destination'); - await fs.writeFile(dest, 'asdf\n', 'utf8'); - await fs.symlink(dest, p); - - await repository.stageFiles(['waslink.txt', 'destination']); - await repository.commit('zero'); - - await fs.unlink(p); - - repository.refresh(); - const symlinkPatch = await repository.getFilePatchForPath('waslink.txt', {staged: false}); - const wrapper = shallow(buildApp({filePatch: symlinkPatch, relPath: 'waslink.txt', stagingStatus: 'unstaged'})); - - sinon.spy(repository, 'stageFiles'); - - await wrapper.find('FilePatchView').prop('toggleSymlinkChange')(); - - assert.isTrue(repository.stageFiles.calledWith(['waslink.txt'])); - }); - - it('handles a deletion and typechange with a special repository method', async function() { - const p = path.join(repository.getWorkingDirectoryPath(), 'waslink.txt'); - const dest = path.join(repository.getWorkingDirectoryPath(), 'destination'); - await fs.writeFile(dest, 'asdf\n', 'utf8'); - await fs.writeFile(p, 'fdsa\n', 'utf8'); - - await repository.stageFiles(['waslink.txt', 'destination']); - await repository.commit('zero'); - - await fs.unlink(p); - await fs.symlink(dest, p); - await repository.stageFiles(['waslink.txt']); - - repository.refresh(); - const symlinkPatch = await repository.getFilePatchForPath('waslink.txt', {staged: true}); - const wrapper = shallow(buildApp({filePatch: symlinkPatch, relPath: 'waslink.txt', stagingStatus: 'staged'})); - - sinon.spy(repository, 'stageFileSymlinkChange'); - - await 
wrapper.find('FilePatchView').prop('toggleSymlinkChange')(); - - assert.isTrue(repository.stageFileSymlinkChange.calledWith('waslink.txt')); - }); - - it('unstages non-deletion typechanges normally', async function() { - const p = path.join(repository.getWorkingDirectoryPath(), 'waslink.txt'); - const dest = path.join(repository.getWorkingDirectoryPath(), 'destination'); - await fs.writeFile(dest, 'asdf\n', 'utf8'); - await fs.symlink(dest, p); - - await repository.stageFiles(['waslink.txt', 'destination']); - await repository.commit('zero'); - - await fs.unlink(p); - - repository.refresh(); - const symlinkPatch = await repository.getFilePatchForPath('waslink.txt', {staged: true}); - const wrapper = shallow(buildApp({filePatch: symlinkPatch, relPath: 'waslink.txt', stagingStatus: 'staged'})); - - sinon.spy(repository, 'unstageFiles'); - - await wrapper.find('FilePatchView').prop('toggleSymlinkChange')(); - - assert.isTrue(repository.unstageFiles.calledWith(['waslink.txt'])); - }); - }); - } - - it('calls discardLines with selected rows', async function() { - const discardLines = sinon.spy(); - const wrapper = shallow(buildApp({discardLines})); - wrapper.find('FilePatchView').prop('selectedRowsChanged')(new Set([1, 2])); - - await wrapper.find('FilePatchView').prop('discardRows')(); - - const lastArgs = discardLines.lastCall.args; - assert.strictEqual(lastArgs[0], filePatch); - assert.sameMembers(Array.from(lastArgs[1]), [1, 2]); - assert.strictEqual(lastArgs[2], repository); - }); - - it('calls discardLines with explicitly provided rows', async function() { - const discardLines = sinon.spy(); - const wrapper = shallow(buildApp({discardLines})); - wrapper.find('FilePatchView').prop('selectedRowsChanged')(new Set([1, 2])); - - await wrapper.find('FilePatchView').prop('discardRows')(new Set([4, 5]), 'hunk'); - - const lastArgs = discardLines.lastCall.args; - assert.strictEqual(lastArgs[0], filePatch); - assert.sameMembers(Array.from(lastArgs[1]), [4, 5]); - 
assert.strictEqual(lastArgs[2], repository); - - assert.sameMembers(Array.from(wrapper.find('FilePatchView').prop('selectedRows')), [4, 5]); - assert.strictEqual(wrapper.find('FilePatchView').prop('selectionMode'), 'hunk'); - }); -}); diff --git a/test/controllers/git-tab-controller.test.js b/test/controllers/git-tab-controller.test.js index 27838dd4274..facd99b14b2 100644 --- a/test/controllers/git-tab-controller.test.js +++ b/test/controllers/git-tab-controller.test.js @@ -3,7 +3,6 @@ import path from 'path'; import React from 'react'; import {mount} from 'enzyme'; import dedent from 'dedent-js'; -import until from 'test-until'; import GitTabController from '../../lib/controllers/git-tab-controller'; import {gitTabControllerProps} from '../fixtures/props/git-tab-props'; @@ -213,6 +212,15 @@ describe('GitTabController', function() { assert.equal(stagingView.setFocus.callCount, 1); }); + it('imperatively selects the commit preview button', async function() { + const repository = await buildRepository(await cloneRepository('three-files')); + const wrapper = mount(await buildApp(repository)); + + const focusMethod = sinon.spy(wrapper.find('GitTabView').instance(), 'focusAndSelectCommitPreviewButton'); + wrapper.instance().focusAndSelectCommitPreviewButton(); + assert.isTrue(focusMethod.called); + }); + describe('focus management', function() { it('remembers the last focus reported by the view', async function() { const repository = await buildRepository(await cloneRepository()); @@ -277,165 +285,6 @@ describe('GitTabController', function() { }); }); - describe('keyboard navigation commands', function() { - let wrapper, rootElement, gitTab, stagingView, commitView, commitController, focusElement; - const focuses = GitTabController.focus; - - const extractReferences = () => { - rootElement = wrapper.instance().refRoot.get(); - gitTab = wrapper.instance().refView.get(); - stagingView = wrapper.instance().refStagingView.get(); - commitController = 
gitTab.refCommitController.get(); - commitView = commitController.refCommitView.get(); - focusElement = stagingView.element; - - const commitViewElements = []; - commitView.refEditorComponent.map(e => commitViewElements.push(e)); - commitView.refAbortMergeButton.map(e => commitViewElements.push(e)); - commitView.refCommitButton.map(e => commitViewElements.push(e)); - - const stubFocus = element => { - sinon.stub(element, 'focus').callsFake(() => { - focusElement = element; - }); - }; - stubFocus(stagingView.refRoot.get()); - for (const e of commitViewElements) { - stubFocus(e); - } - - sinon.stub(commitController, 'hasFocus').callsFake(() => { - return commitViewElements.includes(focusElement); - }); - }; - - const assertSelected = paths => { - const selectionPaths = Array.from(stagingView.state.selection.getSelectedItems()).map(item => item.filePath); - assert.deepEqual(selectionPaths, paths); - }; - - const assertAsyncSelected = paths => { - return assert.async.deepEqual( - Array.from(stagingView.state.selection.getSelectedItems()).map(item => item.filePath), - paths, - ); - }; - - describe('with conflicts and staged files', function() { - beforeEach(async function() { - const workdirPath = await cloneRepository('each-staging-group'); - const repository = await buildRepository(workdirPath); - - // Merge with conflicts - assert.isRejected(repository.git.merge('origin/branch')); - - fs.writeFileSync(path.join(workdirPath, 'unstaged-1.txt'), 'This is an unstaged file.'); - fs.writeFileSync(path.join(workdirPath, 'unstaged-2.txt'), 'This is an unstaged file.'); - fs.writeFileSync(path.join(workdirPath, 'unstaged-3.txt'), 'This is an unstaged file.'); - - // Three staged files - fs.writeFileSync(path.join(workdirPath, 'staged-1.txt'), 'This is a file with some changes staged for commit.'); - fs.writeFileSync(path.join(workdirPath, 'staged-2.txt'), 'This is another file staged for commit.'); - fs.writeFileSync(path.join(workdirPath, 'staged-3.txt'), 'This is a third 
file staged for commit.'); - await repository.stageFiles(['staged-1.txt', 'staged-2.txt', 'staged-3.txt']); - repository.refresh(); - - wrapper = mount(await buildApp(repository)); - await assert.async.lengthOf(wrapper.update().find('GitTabView').prop('unstagedChanges'), 3); - - extractReferences(); - }); - - it('blurs on tool-panel:unfocus', function() { - sinon.spy(workspace.getActivePane(), 'activate'); - - commandRegistry.dispatch(wrapper.find('.github-Git').getDOMNode(), 'tool-panel:unfocus'); - - assert.isTrue(workspace.getActivePane().activate.called); - }); - - it('advances focus through StagingView groups and CommitView, but does not cycle', async function() { - assertSelected(['unstaged-1.txt']); - - commandRegistry.dispatch(rootElement, 'core:focus-next'); - assertSelected(['conflict-1.txt']); - - commandRegistry.dispatch(rootElement, 'core:focus-next'); - assertSelected(['staged-1.txt']); - - commandRegistry.dispatch(rootElement, 'core:focus-next'); - assertSelected(['staged-1.txt']); - await assert.async.strictEqual(focusElement, wrapper.find('AtomTextEditor').instance()); - - // This should be a no-op. (Actually, it'll insert a tab in the CommitView editor.) 
- commandRegistry.dispatch(rootElement, 'core:focus-next'); - assertSelected(['staged-1.txt']); - assert.strictEqual(focusElement, wrapper.find('AtomTextEditor').instance()); - }); - - it('retreats focus from the CommitView through StagingView groups, but does not cycle', async function() { - gitTab.setFocus(focuses.EDITOR); - sinon.stub(commitView, 'hasFocusEditor').returns(true); - - commandRegistry.dispatch(rootElement, 'core:focus-previous'); - await assert.async.strictEqual(focusElement, stagingView.refRoot.get()); - assertSelected(['staged-1.txt']); - - commandRegistry.dispatch(rootElement, 'core:focus-previous'); - await assertAsyncSelected(['conflict-1.txt']); - - commandRegistry.dispatch(rootElement, 'core:focus-previous'); - await assertAsyncSelected(['unstaged-1.txt']); - - // This should be a no-op. - commandRegistry.dispatch(rootElement, 'core:focus-previous'); - await assertAsyncSelected(['unstaged-1.txt']); - }); - }); - - describe('with staged changes', function() { - let repository; - - beforeEach(async function() { - const workdirPath = await cloneRepository('each-staging-group'); - repository = await buildRepository(workdirPath); - - // A staged file - fs.writeFileSync(path.join(workdirPath, 'staged-1.txt'), 'This is a file with some changes staged for commit.'); - await repository.stageFiles(['staged-1.txt']); - repository.refresh(); - - const prepareToCommit = () => Promise.resolve(true); - const ensureGitTab = () => Promise.resolve(false); - - wrapper = mount(await buildApp(repository, {ensureGitTab, prepareToCommit})); - - extractReferences(); - await assert.async.isTrue(commitView.props.stagedChangesExist); - }); - - it('focuses the CommitView on github:commit with an empty commit message', async function() { - commitView.refEditorModel.map(e => e.setText('')); - sinon.spy(wrapper.instance(), 'commit'); - wrapper.update(); - - commandRegistry.dispatch(workspaceElement, 'github:commit'); - - await assert.async.strictEqual(focusElement, 
wrapper.find('AtomTextEditor').instance()); - assert.isFalse(wrapper.instance().commit.called); - }); - - it('creates a commit on github:commit with a nonempty commit message', async function() { - commitView.refEditorModel.map(e => e.setText('I fixed the things')); - sinon.spy(repository, 'commit'); - - commandRegistry.dispatch(workspaceElement, 'github:commit'); - - await until('Commit method called', () => repository.commit.calledWith('I fixed the things')); - }); - }); - }); - describe('integration tests', function() { it('can stage and unstage files and commit', async function() { const workdirPath = await cloneRepository('three-files'); diff --git a/test/controllers/multi-file-patch-controller.test.js b/test/controllers/multi-file-patch-controller.test.js new file mode 100644 index 00000000000..aed313dd767 --- /dev/null +++ b/test/controllers/multi-file-patch-controller.test.js @@ -0,0 +1,496 @@ +import path from 'path'; +import fs from 'fs-extra'; +import React from 'react'; +import {shallow} from 'enzyme'; + +import MultiFilePatchController from '../../lib/controllers/multi-file-patch-controller'; +import MultiFilePatch from '../../lib/models/patch/multi-file-patch'; +import * as reporterProxy from '../../lib/reporter-proxy'; +import {multiFilePatchBuilder} from '../builder/patch'; +import {cloneRepository, buildRepository} from '../helpers'; +import CommitPreviewItem from '../../lib/items/commit-preview-item'; + +describe('MultiFilePatchController', function() { + let atomEnv, repository, multiFilePatch, filePatch; + + beforeEach(async function() { + atomEnv = global.buildAtomEnvironment(); + + const workdirPath = await cloneRepository(); + repository = await buildRepository(workdirPath); + + // a.txt: unstaged changes + const filePath = 'a.txt'; + await fs.writeFile(path.join(workdirPath, filePath), '00\n01\n02\n03\n04\n05\n06'); + + multiFilePatch = await repository.getFilePatchForPath(filePath); + [filePatch] = multiFilePatch.getFilePatches(); + }); + + 
 afterEach(function() { + atomEnv.destroy(); + }); + + function buildApp(overrideProps = {}) { + const props = { + repository, + stagingStatus: 'unstaged', + multiFilePatch, + hasUndoHistory: false, + workspace: atomEnv.workspace, + commands: atomEnv.commands, + keymaps: atomEnv.keymaps, + tooltips: atomEnv.tooltips, + config: atomEnv.config, + destroy: () => {}, + discardLines: () => {}, + undoLastDiscard: () => {}, + surface: () => {}, + itemType: CommitPreviewItem, + ...overrideProps, + }; + + return <MultiFilePatchController {...props} />; + } + + it('passes extra props to the FilePatchView', function() { + const extra = Symbol('extra'); + const wrapper = shallow(buildApp({extra})); + + assert.strictEqual(wrapper.find('MultiFilePatchView').prop('extra'), extra); + }); + + it('calls undoLastDiscard through with set arguments', function() { + const undoLastDiscard = sinon.spy(); + const wrapper = shallow(buildApp({undoLastDiscard, stagingStatus: 'staged'})); + + wrapper.find('MultiFilePatchView').prop('undoLastDiscard')(filePatch); + + assert.isTrue(undoLastDiscard.calledWith(filePatch.getPath(), repository)); + }); + + describe('diveIntoMirrorPatch()', function() { + it('destroys the current pane and opens the staged changes', async function() { + const destroy = sinon.spy(); + sinon.stub(atomEnv.workspace, 'open').resolves(); + const wrapper = shallow(buildApp({stagingStatus: 'unstaged', destroy})); + + await wrapper.find('MultiFilePatchView').prop('diveIntoMirrorPatch')(filePatch); + + assert.isTrue(destroy.called); + assert.isTrue(atomEnv.workspace.open.calledWith( + `atom-github://file-patch/${filePatch.getPath()}` + + `?workdir=${encodeURIComponent(repository.getWorkingDirectoryPath())}&stagingStatus=staged`, + )); + }); + + it('destroys the current pane and opens the unstaged changes', async function() { + const destroy = sinon.spy(); + sinon.stub(atomEnv.workspace, 'open').resolves(); + const wrapper = shallow(buildApp({stagingStatus: 'staged', destroy})); + + + await 
wrapper.find('MultiFilePatchView').prop('diveIntoMirrorPatch')(filePatch); + + assert.isTrue(destroy.called); + assert.isTrue(atomEnv.workspace.open.calledWith( + `atom-github://file-patch/${filePatch.getPath()}` + + `?workdir=${encodeURIComponent(repository.getWorkingDirectoryPath())}&stagingStatus=unstaged`, + )); + }); + }); + + describe('openFile()', function() { + it('opens an editor on the current file', async function() { + const wrapper = shallow(buildApp({stagingStatus: 'unstaged'})); + const editor = await wrapper.find('MultiFilePatchView').prop('openFile')(filePatch, []); + + assert.strictEqual(editor.getPath(), path.join(repository.getWorkingDirectoryPath(), filePatch.getPath())); + }); + + it('sets the cursor to a single position', async function() { + const wrapper = shallow(buildApp({relPath: 'a.txt', stagingStatus: 'unstaged'})); + const editor = await wrapper.find('MultiFilePatchView').prop('openFile')(filePatch, [[1, 1]]); + + assert.deepEqual(editor.getCursorBufferPositions().map(p => p.serialize()), [[1, 1]]); + }); + + it('adds cursors at a set of positions', async function() { + const wrapper = shallow(buildApp({stagingStatus: 'unstaged'})); + const editor = await wrapper.find('MultiFilePatchView').prop('openFile')(filePatch, [[1, 1], [3, 1], [5, 0]]); + + assert.deepEqual(editor.getCursorBufferPositions().map(p => p.serialize()), [[1, 1], [3, 1], [5, 0]]); + }); + }); + + describe('toggleFile()', function() { + it('stages the current file if unstaged', async function() { + sinon.spy(repository, 'stageFiles'); + const wrapper = shallow(buildApp({stagingStatus: 'unstaged'})); + + await wrapper.find('MultiFilePatchView').prop('toggleFile')(filePatch); + + assert.isTrue(repository.stageFiles.calledWith([filePatch.getPath()])); + }); + + it('unstages the current file if staged', async function() { + sinon.spy(repository, 'unstageFiles'); + const wrapper = shallow(buildApp({stagingStatus: 'staged'})); + + await 
wrapper.find('MultiFilePatchView').prop('toggleFile')(filePatch); + + assert.isTrue(repository.unstageFiles.calledWith([filePatch.getPath()])); + }); + + it('is a no-op if a staging operation is already in progress', async function() { + sinon.stub(repository, 'stageFiles').resolves('staged'); + sinon.stub(repository, 'unstageFiles').resolves('unstaged'); + + const wrapper = shallow(buildApp({stagingStatus: 'unstaged'})); + assert.strictEqual(await wrapper.find('MultiFilePatchView').prop('toggleFile')(filePatch), 'staged'); + + // No-op + assert.isNull(await wrapper.find('MultiFilePatchView').prop('toggleFile')(filePatch)); + + // Simulate an identical patch arriving too soon + wrapper.setProps({multiFilePatch: multiFilePatch.clone()}); + + // Still a no-op + assert.isNull(await wrapper.find('MultiFilePatchView').prop('toggleFile')(filePatch)); + + // Simulate updated patch arrival + const promise = wrapper.instance().patchChangePromise; + wrapper.setProps({multiFilePatch: new MultiFilePatch({})}); + await promise; + + // Performs an operation again + assert.strictEqual(await wrapper.find('MultiFilePatchView').prop('toggleFile')(filePatch), 'staged'); + }); + }); + + describe('selected row and selection mode tracking', function() { + it('captures the selected row set', function() { + const wrapper = shallow(buildApp()); + assert.sameMembers(Array.from(wrapper.find('MultiFilePatchView').prop('selectedRows')), []); + assert.strictEqual(wrapper.find('MultiFilePatchView').prop('selectionMode'), 'hunk'); + assert.isFalse(wrapper.find('MultiFilePatchView').prop('hasMultipleFileSelections')); + + wrapper.find('MultiFilePatchView').prop('selectedRowsChanged')(new Set([1, 2]), 'line', true); + assert.sameMembers(Array.from(wrapper.find('MultiFilePatchView').prop('selectedRows')), [1, 2]); + assert.strictEqual(wrapper.find('MultiFilePatchView').prop('selectionMode'), 'line'); + assert.isTrue(wrapper.find('MultiFilePatchView').prop('hasMultipleFileSelections')); + }); + + 
it('does not re-render if the row set, selection mode, and file spanning are unchanged', function() { + const wrapper = shallow(buildApp()); + assert.sameMembers(Array.from(wrapper.find('MultiFilePatchView').prop('selectedRows')), []); + assert.strictEqual(wrapper.find('MultiFilePatchView').prop('selectionMode'), 'hunk'); + assert.isFalse(wrapper.find('MultiFilePatchView').prop('hasMultipleFileSelections')); + + sinon.spy(wrapper.instance(), 'render'); + + // All changed + wrapper.find('MultiFilePatchView').prop('selectedRowsChanged')(new Set([1, 2]), 'line', true); + + assert.isTrue(wrapper.instance().render.called); + assert.sameMembers(Array.from(wrapper.find('MultiFilePatchView').prop('selectedRows')), [1, 2]); + assert.strictEqual(wrapper.find('MultiFilePatchView').prop('selectionMode'), 'line'); + assert.isTrue(wrapper.find('MultiFilePatchView').prop('hasMultipleFileSelections')); + + // Nothing changed + wrapper.instance().render.resetHistory(); + wrapper.find('MultiFilePatchView').prop('selectedRowsChanged')(new Set([2, 1]), 'line', true); + + assert.sameMembers(Array.from(wrapper.find('MultiFilePatchView').prop('selectedRows')), [1, 2]); + assert.strictEqual(wrapper.find('MultiFilePatchView').prop('selectionMode'), 'line'); + assert.isTrue(wrapper.find('MultiFilePatchView').prop('hasMultipleFileSelections')); + assert.isFalse(wrapper.instance().render.called); + + // Selection mode changed + wrapper.instance().render.resetHistory(); + wrapper.find('MultiFilePatchView').prop('selectedRowsChanged')(new Set([1, 2]), 'hunk', true); + + assert.sameMembers(Array.from(wrapper.find('MultiFilePatchView').prop('selectedRows')), [1, 2]); + assert.strictEqual(wrapper.find('MultiFilePatchView').prop('selectionMode'), 'hunk'); + assert.isTrue(wrapper.find('MultiFilePatchView').prop('hasMultipleFileSelections')); + assert.isTrue(wrapper.instance().render.called); + + // Selection file spanning changed + wrapper.instance().render.resetHistory(); + 
wrapper.find('MultiFilePatchView').prop('selectedRowsChanged')(new Set([1, 2]), 'hunk', false); + + assert.sameMembers(Array.from(wrapper.find('MultiFilePatchView').prop('selectedRows')), [1, 2]); + assert.strictEqual(wrapper.find('MultiFilePatchView').prop('selectionMode'), 'hunk'); + assert.isFalse(wrapper.find('MultiFilePatchView').prop('hasMultipleFileSelections')); + assert.isTrue(wrapper.instance().render.called); + }); + + describe('discardRows()', function() { + it('records an event', async function() { + const wrapper = shallow(buildApp()); + sinon.stub(reporterProxy, 'addEvent'); + await wrapper.find('MultiFilePatchView').prop('discardRows')(new Set([1, 2]), 'hunk'); + assert.isTrue(reporterProxy.addEvent.calledWith('discard-unstaged-changes', { + package: 'github', + component: 'MultiFilePatchController', + lineCount: 2, + eventSource: undefined, + })); + }); + + it('is a no-op when multiple patches are present', async function() { + const {multiFilePatch: mfp} = multiFilePatchBuilder() + .addFilePatch() + .addFilePatch() + .build(); + const discardLines = sinon.spy(); + const wrapper = shallow(buildApp({discardLines, multiFilePatch: mfp})); + sinon.stub(reporterProxy, 'addEvent'); + await wrapper.find('MultiFilePatchView').prop('discardRows')(new Set([1, 2])); + assert.isFalse(reporterProxy.addEvent.called); + assert.isFalse(discardLines.called); + }); + }); + + describe('undoLastDiscard()', function() { + it('records an event', function() { + const wrapper = shallow(buildApp()); + sinon.stub(reporterProxy, 'addEvent'); + wrapper.find('MultiFilePatchView').prop('undoLastDiscard')(filePatch); + assert.isTrue(reporterProxy.addEvent.calledWith('undo-last-discard', { + package: 'github', + component: 'MultiFilePatchController', + eventSource: undefined, + })); + }); + }); + }); + + describe('toggleRows()', function() { + it('is a no-op with no selected rows', async function() { + const wrapper = shallow(buildApp()); + + sinon.spy(repository, 
'applyPatchToIndex'); + + await wrapper.find('MultiFilePatchView').prop('toggleRows')(); + assert.isFalse(repository.applyPatchToIndex.called); + }); + + it('applies a stage patch to the index', async function() { + const wrapper = shallow(buildApp()); + wrapper.find('MultiFilePatchView').prop('selectedRowsChanged')(new Set([1]), 'hunk', false); + + sinon.spy(multiFilePatch, 'getStagePatchForLines'); + sinon.spy(repository, 'applyPatchToIndex'); + + await wrapper.find('MultiFilePatchView').prop('toggleRows')(); + + assert.sameMembers(Array.from(multiFilePatch.getStagePatchForLines.lastCall.args[0]), [1]); + assert.isTrue(repository.applyPatchToIndex.calledWith(multiFilePatch.getStagePatchForLines.returnValues[0])); + }); + + it('toggles a different row set if provided', async function() { + const wrapper = shallow(buildApp()); + wrapper.find('MultiFilePatchView').prop('selectedRowsChanged')(new Set([1]), 'line', false); + + sinon.spy(multiFilePatch, 'getStagePatchForLines'); + sinon.spy(repository, 'applyPatchToIndex'); + + await wrapper.find('MultiFilePatchView').prop('toggleRows')(new Set([2]), 'hunk'); + + assert.sameMembers(Array.from(multiFilePatch.getStagePatchForLines.lastCall.args[0]), [2]); + assert.isTrue(repository.applyPatchToIndex.calledWith(multiFilePatch.getStagePatchForLines.returnValues[0])); + + assert.sameMembers(Array.from(wrapper.find('MultiFilePatchView').prop('selectedRows')), [2]); + assert.strictEqual(wrapper.find('MultiFilePatchView').prop('selectionMode'), 'hunk'); + }); + + it('applies an unstage patch to the index', async function() { + await repository.stageFiles(['a.txt']); + const otherPatch = await repository.getFilePatchForPath('a.txt', {staged: true}); + const wrapper = shallow(buildApp({multiFilePatch: otherPatch, stagingStatus: 'staged'})); + wrapper.find('MultiFilePatchView').prop('selectedRowsChanged')(new Set([2]), 'hunk', false); + + sinon.spy(otherPatch, 'getUnstagePatchForLines'); + sinon.spy(repository, 
'applyPatchToIndex'); + + await wrapper.find('MultiFilePatchView').prop('toggleRows')(new Set([2]), 'hunk'); + + assert.sameMembers(Array.from(otherPatch.getUnstagePatchForLines.lastCall.args[0]), [2]); + assert.isTrue(repository.applyPatchToIndex.calledWith(otherPatch.getUnstagePatchForLines.returnValues[0])); + }); + }); + + if (process.platform !== 'win32') { + describe('toggleModeChange()', function() { + it("it stages an unstaged file's new mode", async function() { + const p = path.join(repository.getWorkingDirectoryPath(), 'a.txt'); + await fs.chmod(p, 0o755); + repository.refresh(); + const newMultiFilePatch = await repository.getFilePatchForPath('a.txt', {staged: false}); + + const wrapper = shallow(buildApp({filePatch: newMultiFilePatch, stagingStatus: 'unstaged'})); + const [newFilePatch] = newMultiFilePatch.getFilePatches(); + + sinon.spy(repository, 'stageFileModeChange'); + await wrapper.find('MultiFilePatchView').prop('toggleModeChange')(newFilePatch); + + assert.isTrue(repository.stageFileModeChange.calledWith('a.txt', '100755')); + }); + + it("it stages a staged file's old mode", async function() { + const p = path.join(repository.getWorkingDirectoryPath(), 'a.txt'); + await fs.chmod(p, 0o755); + await repository.stageFiles(['a.txt']); + repository.refresh(); + const newMultiFilePatch = await repository.getFilePatchForPath('a.txt', {staged: true}); + const [newFilePatch] = newMultiFilePatch.getFilePatches(); + + const wrapper = shallow(buildApp({filePatch: newMultiFilePatch, stagingStatus: 'staged'})); + + sinon.spy(repository, 'stageFileModeChange'); + await wrapper.find('MultiFilePatchView').prop('toggleModeChange')(newFilePatch); + + assert.isTrue(repository.stageFileModeChange.calledWith('a.txt', '100644')); + }); + }); + + describe('toggleSymlinkChange', function() { + it('handles an addition and typechange with a special repository method', async function() { + if (process.env.ATOM_GITHUB_SKIP_SYMLINKS) { + this.skip(); + return; + } + + 
const p = path.join(repository.getWorkingDirectoryPath(), 'waslink.txt'); + const dest = path.join(repository.getWorkingDirectoryPath(), 'destination'); + await fs.writeFile(dest, 'asdf\n', 'utf8'); + await fs.symlink(dest, p); + + await repository.stageFiles(['waslink.txt', 'destination']); + await repository.commit('zero'); + + await fs.unlink(p); + await fs.writeFile(p, 'fdsa\n', 'utf8'); + + repository.refresh(); + const symlinkMultiPatch = await repository.getFilePatchForPath('waslink.txt', {staged: false}); + const wrapper = shallow(buildApp({filePatch: symlinkMultiPatch, relPath: 'waslink.txt', stagingStatus: 'unstaged'})); + const [symlinkPatch] = symlinkMultiPatch.getFilePatches(); + + sinon.spy(repository, 'stageFileSymlinkChange'); + + await wrapper.find('MultiFilePatchView').prop('toggleSymlinkChange')(symlinkPatch); + + assert.isTrue(repository.stageFileSymlinkChange.calledWith('waslink.txt')); + }); + + it('stages non-addition typechanges normally', async function() { + if (process.env.ATOM_GITHUB_SKIP_SYMLINKS) { + this.skip(); + return; + } + + const p = path.join(repository.getWorkingDirectoryPath(), 'waslink.txt'); + const dest = path.join(repository.getWorkingDirectoryPath(), 'destination'); + await fs.writeFile(dest, 'asdf\n', 'utf8'); + await fs.symlink(dest, p); + + await repository.stageFiles(['waslink.txt', 'destination']); + await repository.commit('zero'); + + await fs.unlink(p); + + repository.refresh(); + const symlinkMultiPatch = await repository.getFilePatchForPath('waslink.txt', {staged: false}); + const wrapper = shallow(buildApp({filePatch: symlinkMultiPatch, relPath: 'waslink.txt', stagingStatus: 'unstaged'})); + + sinon.spy(repository, 'stageFiles'); + + const [symlinkPatch] = symlinkMultiPatch.getFilePatches(); + await wrapper.find('MultiFilePatchView').prop('toggleSymlinkChange')(symlinkPatch); + + assert.isTrue(repository.stageFiles.calledWith(['waslink.txt'])); + }); + + it('handles a deletion and typechange with a special 
repository method', async function() { + const p = path.join(repository.getWorkingDirectoryPath(), 'waslink.txt'); + const dest = path.join(repository.getWorkingDirectoryPath(), 'destination'); + await fs.writeFile(dest, 'asdf\n', 'utf8'); + await fs.writeFile(p, 'fdsa\n', 'utf8'); + + await repository.stageFiles(['waslink.txt', 'destination']); + await repository.commit('zero'); + + await fs.unlink(p); + await fs.symlink(dest, p); + await repository.stageFiles(['waslink.txt']); + + repository.refresh(); + const symlinkMultiPatch = await repository.getFilePatchForPath('waslink.txt', {staged: true}); + const wrapper = shallow(buildApp({filePatch: symlinkMultiPatch, relPath: 'waslink.txt', stagingStatus: 'staged'})); + + sinon.spy(repository, 'stageFileSymlinkChange'); + + const [symlinkPatch] = symlinkMultiPatch.getFilePatches(); + await wrapper.find('MultiFilePatchView').prop('toggleSymlinkChange')(symlinkPatch); + + assert.isTrue(repository.stageFileSymlinkChange.calledWith('waslink.txt')); + }); + + it('unstages non-deletion typechanges normally', async function() { + const p = path.join(repository.getWorkingDirectoryPath(), 'waslink.txt'); + const dest = path.join(repository.getWorkingDirectoryPath(), 'destination'); + await fs.writeFile(dest, 'asdf\n', 'utf8'); + await fs.symlink(dest, p); + + await repository.stageFiles(['waslink.txt', 'destination']); + await repository.commit('zero'); + + await fs.unlink(p); + + await repository.stageFiles(['waslink.txt']); + + repository.refresh(); + const symlinkMultiPatch = await repository.getFilePatchForPath('waslink.txt', {staged: true}); + const wrapper = shallow(buildApp({multiFilePatch: symlinkMultiPatch, relPath: 'waslink.txt', stagingStatus: 'staged'})); + + sinon.spy(repository, 'unstageFiles'); + + const [symlinkPatch] = symlinkMultiPatch.getFilePatches(); + await wrapper.find('MultiFilePatchView').prop('toggleSymlinkChange')(symlinkPatch); + + assert.isTrue(repository.unstageFiles.calledWith(['waslink.txt'])); 
+ }); + }); + } + + it('calls discardLines with selected rows', async function() { + const discardLines = sinon.spy(); + const wrapper = shallow(buildApp({discardLines})); + wrapper.find('MultiFilePatchView').prop('selectedRowsChanged')(new Set([1, 2]), 'hunk', false); + + await wrapper.find('MultiFilePatchView').prop('discardRows')(); + + const lastArgs = discardLines.lastCall.args; + assert.strictEqual(lastArgs[0], multiFilePatch); + assert.sameMembers(Array.from(lastArgs[1]), [1, 2]); + assert.strictEqual(lastArgs[2], repository); + }); + + it('calls discardLines with explicitly provided rows', async function() { + const discardLines = sinon.spy(); + const wrapper = shallow(buildApp({discardLines})); + wrapper.find('MultiFilePatchView').prop('selectedRowsChanged')(new Set([1, 2]), 'hunk', false); + + await wrapper.find('MultiFilePatchView').prop('discardRows')(new Set([4, 5]), 'hunk'); + + const lastArgs = discardLines.lastCall.args; + assert.strictEqual(lastArgs[0], multiFilePatch); + assert.sameMembers(Array.from(lastArgs[1]), [4, 5]); + assert.strictEqual(lastArgs[2], repository); + + assert.sameMembers(Array.from(wrapper.find('MultiFilePatchView').prop('selectedRows')), [4, 5]); + assert.strictEqual(wrapper.find('MultiFilePatchView').prop('selectionMode'), 'hunk'); + }); +}); diff --git a/test/controllers/root-controller.test.js b/test/controllers/root-controller.test.js index 4d94f594ea8..7edebaab67b 100644 --- a/test/controllers/root-controller.test.js +++ b/test/controllers/root-controller.test.js @@ -6,6 +6,7 @@ import {shallow, mount} from 'enzyme'; import dedent from 'dedent-js'; import {cloneRepository, buildRepository} from '../helpers'; +import {multiFilePatchBuilder} from '../builder/patch'; import {GitError} from '../../lib/git-shell-out-strategy'; import Repository from '../../lib/models/repository'; import WorkdirContextPool from '../../lib/models/workdir-context-pool'; @@ -15,6 +16,7 @@ import GitTabItem from '../../lib/items/git-tab-item'; 
import GitHubTabItem from '../../lib/items/github-tab-item'; import ResolutionProgress from '../../lib/models/conflicts/resolution-progress'; import IssueishDetailItem from '../../lib/items/issueish-detail-item'; +import CommitPreviewItem from '../../lib/items/commit-preview-item'; import * as reporterProxy from '../../lib/reporter-proxy'; import RootController from '../../lib/controllers/root-controller'; @@ -487,13 +489,31 @@ describe('RootController', function() { }); describe('discarding and restoring changed lines', () => { - describe('discardLines(filePatch, lines)', () => { + describe('discardLines(multiFilePatch, lines)', () => { + it('is a no-op when multiple FilePatches are present', async () => { + const workdirPath = await cloneRepository('three-files'); + const repository = await buildRepository(workdirPath); + + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch() + .addFilePatch() + .build(); + + sinon.spy(repository, 'applyPatchToWorkdir'); + + const wrapper = shallow(React.cloneElement(app, {repository})); + await wrapper.instance().discardLines(multiFilePatch, new Set([0])); + + assert.isFalse(repository.applyPatchToWorkdir.called); + }); + it('only discards lines if buffer is unmodified, otherwise notifies user', async () => { const workdirPath = await cloneRepository('three-files'); const repository = await buildRepository(workdirPath); fs.writeFileSync(path.join(workdirPath, 'a.txt'), 'modification\n'); - const unstagedFilePatch = await repository.getFilePatchForPath('a.txt'); + const multiFilePatch = await repository.getFilePatchForPath('a.txt'); + const unstagedFilePatch = multiFilePatch.getFilePatches()[0]; const editor = await workspace.open(path.join(workdirPath, 'a.txt')); @@ -510,14 +530,14 @@ describe('RootController', function() { sinon.stub(notificationManager, 'addError'); // unmodified buffer const hunkLines = unstagedFilePatch.getHunks()[0].getBufferRows(); - await wrapper.instance().discardLines(unstagedFilePatch, 
new Set([hunkLines[0]])); + await wrapper.instance().discardLines(multiFilePatch, new Set([hunkLines[0]])); assert.isTrue(repository.applyPatchToWorkdir.calledOnce); assert.isFalse(notificationManager.addError.called); // modified buffer repository.applyPatchToWorkdir.reset(); editor.setText('modify contents'); - await wrapper.instance().discardLines(unstagedFilePatch, new Set(unstagedFilePatch.getHunks()[0].getBufferRows())); + await wrapper.instance().discardLines(multiFilePatch, new Set(unstagedFilePatch.getHunks()[0].getBufferRows())); assert.isFalse(repository.applyPatchToWorkdir.called); const notificationArgs = notificationManager.addError.args[0]; assert.equal(notificationArgs[0], 'Cannot discard lines.'); @@ -559,34 +579,34 @@ describe('RootController', function() { describe('undoLastDiscard(partialDiscardFilePath)', () => { describe('when partialDiscardFilePath is not null', () => { - let unstagedFilePatch, repository, absFilePath, wrapper; + let multiFilePatch, repository, absFilePath, wrapper; + beforeEach(async () => { const workdirPath = await cloneRepository('multi-line-file'); repository = await buildRepository(workdirPath); absFilePath = path.join(workdirPath, 'sample.js'); fs.writeFileSync(absFilePath, 'foo\nbar\nbaz\n'); - unstagedFilePatch = await repository.getFilePatchForPath('sample.js'); + multiFilePatch = await repository.getFilePatchForPath('sample.js'); app = React.cloneElement(app, {repository}); wrapper = shallow(app); - wrapper.setState({ - filePath: 'sample.js', - filePatch: unstagedFilePatch, - stagingStatus: 'unstaged', - }); }); it('reverses last discard for file path', async () => { const contents1 = fs.readFileSync(absFilePath, 'utf8'); - await wrapper.instance().discardLines(unstagedFilePatch, new Set(unstagedFilePatch.getHunks()[0].getBufferRows().slice(0, 2))); + + const rows0 = new Set(multiFilePatch.getFilePatches()[0].getHunks()[0].getBufferRows().slice(0, 2)); + await wrapper.instance().discardLines(multiFilePatch, rows0, 
repository); const contents2 = fs.readFileSync(absFilePath, 'utf8'); + assert.notEqual(contents1, contents2); await repository.refresh(); - unstagedFilePatch = await repository.getFilePatchForPath('sample.js'); - wrapper.setState({filePatch: unstagedFilePatch}); - await wrapper.instance().discardLines(unstagedFilePatch, new Set(unstagedFilePatch.getHunks()[0].getBufferRows().slice(2, 4))); + multiFilePatch = await repository.getFilePatchForPath('sample.js'); + + const rows1 = new Set(multiFilePatch.getFilePatches()[0].getHunks()[0].getBufferRows().slice(2, 4)); + await wrapper.instance().discardLines(multiFilePatch, rows1); const contents3 = fs.readFileSync(absFilePath, 'utf8'); assert.notEqual(contents2, contents3); @@ -598,7 +618,8 @@ describe('RootController', function() { it('does not undo if buffer is modified', async () => { const contents1 = fs.readFileSync(absFilePath, 'utf8'); - await wrapper.instance().discardLines(unstagedFilePatch, new Set(unstagedFilePatch.getHunks()[0].getBufferRows().slice(0, 2))); + const rows0 = new Set(multiFilePatch.getFilePatches()[0].getHunks()[0].getBufferRows().slice(0, 2)); + await wrapper.instance().discardLines(multiFilePatch, rows0); const contents2 = fs.readFileSync(absFilePath, 'utf8'); assert.notEqual(contents1, contents2); @@ -610,8 +631,6 @@ describe('RootController', function() { sinon.stub(notificationManager, 'addError'); await repository.refresh(); - unstagedFilePatch = await repository.getFilePatchForPath('sample.js'); - wrapper.setState({filePatch: unstagedFilePatch}); await wrapper.instance().undoLastDiscard('sample.js'); const notificationArgs = notificationManager.addError.args[0]; assert.equal(notificationArgs[0], 'Cannot undo last discard.'); @@ -622,7 +641,8 @@ describe('RootController', function() { describe('when file content has changed since last discard', () => { it('successfully undoes discard if changes do not conflict', async () => { const contents1 = fs.readFileSync(absFilePath, 'utf8'); - await 
wrapper.instance().discardLines(unstagedFilePatch, new Set(unstagedFilePatch.getHunks()[0].getBufferRows().slice(0, 2))); + const rows0 = new Set(multiFilePatch.getFilePatches()[0].getHunks()[0].getBufferRows().slice(0, 2)); + await wrapper.instance().discardLines(multiFilePatch, rows0); const contents2 = fs.readFileSync(absFilePath, 'utf8'); assert.notEqual(contents1, contents2); @@ -631,8 +651,6 @@ describe('RootController', function() { fs.writeFileSync(absFilePath, contents2 + change); await repository.refresh(); - unstagedFilePatch = await repository.getFilePatchForPath('sample.js'); - wrapper.setState({filePatch: unstagedFilePatch}); await wrapper.instance().undoLastDiscard('sample.js'); await assert.async.equal(fs.readFileSync(absFilePath, 'utf8'), contents1 + change); @@ -642,7 +660,8 @@ describe('RootController', function() { await repository.git.exec(['config', 'merge.conflictstyle', 'diff3']); const contents1 = fs.readFileSync(absFilePath, 'utf8'); - await wrapper.instance().discardLines(unstagedFilePatch, new Set(unstagedFilePatch.getHunks()[0].getBufferRows().slice(0, 2))); + const rows0 = new Set(multiFilePatch.getFilePatches()[0].getHunks()[0].getBufferRows().slice(0, 2)); + await wrapper.instance().discardLines(multiFilePatch, rows0); const contents2 = fs.readFileSync(absFilePath, 'utf8'); assert.notEqual(contents1, contents2); @@ -651,8 +670,6 @@ describe('RootController', function() { fs.writeFileSync(absFilePath, change + contents2); await repository.refresh(); - unstagedFilePatch = await repository.getFilePatchForPath('sample.js'); - wrapper.setState({filePatch: unstagedFilePatch}); // click 'Cancel' confirm.returns(2); @@ -705,11 +722,13 @@ describe('RootController', function() { it('clears the discard history if the last blob is no longer valid', async () => { // this would occur in the case of garbage collection cleaning out the blob - await wrapper.instance().discardLines(unstagedFilePatch, new 
Set(unstagedFilePatch.getHunks()[0].getBufferRows().slice(0, 2))); + const rows0 = new Set(multiFilePatch.getFilePatches()[0].getHunks()[0].getBufferRows().slice(0, 2)); + await wrapper.instance().discardLines(multiFilePatch, rows0); await repository.refresh(); - unstagedFilePatch = await repository.getFilePatchForPath('sample.js'); - wrapper.setState({filePatch: unstagedFilePatch}); - const {beforeSha} = await wrapper.instance().discardLines(unstagedFilePatch, new Set(unstagedFilePatch.getHunks()[0].getBufferRows().slice(2, 4))); + + const multiFilePatch1 = await repository.getFilePatchForPath('sample.js'); + const rows1 = new Set(multiFilePatch1.getFilePatches()[0].getHunks()[0].getBufferRows().slice(2, 4)); + const {beforeSha} = await wrapper.instance().discardLines(multiFilePatch1, rows1); // remove blob from git object store fs.unlinkSync(path.join(repository.getGitDirectoryPath(), 'objects', beforeSha.slice(0, 2), beforeSha.slice(2))); @@ -989,13 +1008,13 @@ describe('RootController', function() { editor.setCursorBufferPosition([7, 0]); // TODO: too implementation-detail-y - const filePatchItem = { + const changedFileItem = { goToDiffLine: sinon.spy(), focus: sinon.spy(), getRealItemPromise: () => Promise.resolve(), getFilePatchLoadedPromise: () => Promise.resolve(), }; - sinon.stub(workspace, 'open').returns(filePatchItem); + sinon.stub(workspace, 'open').returns(changedFileItem); await wrapper.instance().viewUnstagedChangesForCurrentFile(); await assert.async.equal(workspace.open.callCount, 1); @@ -1003,9 +1022,9 @@ describe('RootController', function() { `atom-github://file-patch/a.txt?workdir=${encodeURIComponent(workdirPath)}&stagingStatus=unstaged`, {pending: true, activatePane: true, activateItem: true}, ]); - await assert.async.equal(filePatchItem.goToDiffLine.callCount, 1); - assert.deepEqual(filePatchItem.goToDiffLine.args[0], [8]); - assert.equal(filePatchItem.focus.callCount, 1); + await assert.async.equal(changedFileItem.goToDiffLine.callCount, 
1); + assert.deepEqual(changedFileItem.goToDiffLine.args[0], [8]); + assert.equal(changedFileItem.focus.callCount, 1); }); it('does nothing on an untitled buffer', async function() { @@ -1034,13 +1053,13 @@ describe('RootController', function() { editor.setCursorBufferPosition([7, 0]); // TODO: too implementation-detail-y - const filePatchItem = { + const changedFileItem = { goToDiffLine: sinon.spy(), focus: sinon.spy(), getRealItemPromise: () => Promise.resolve(), getFilePatchLoadedPromise: () => Promise.resolve(), }; - sinon.stub(workspace, 'open').returns(filePatchItem); + sinon.stub(workspace, 'open').returns(changedFileItem); await wrapper.instance().viewStagedChangesForCurrentFile(); await assert.async.equal(workspace.open.callCount, 1); @@ -1048,9 +1067,9 @@ describe('RootController', function() { `atom-github://file-patch/a.txt?workdir=${encodeURIComponent(workdirPath)}&stagingStatus=staged`, {pending: true, activatePane: true, activateItem: true}, ]); - await assert.async.equal(filePatchItem.goToDiffLine.callCount, 1); - assert.deepEqual(filePatchItem.goToDiffLine.args[0], [8]); - assert.equal(filePatchItem.focus.callCount, 1); + await assert.async.equal(changedFileItem.goToDiffLine.callCount, 1); + assert.deepEqual(changedFileItem.goToDiffLine.args[0], [8]); + assert.equal(changedFileItem.focus.callCount, 1); }); it('does nothing on an untitled buffer', async function() { @@ -1088,6 +1107,31 @@ describe('RootController', function() { }); }); + describe('opening a CommitPreviewItem', function() { + it('registers an opener for CommitPreviewItems', async function() { + const workdir = await cloneRepository('three-files'); + const repository = await buildRepository(workdir); + const wrapper = mount(React.cloneElement(app, {repository})); + + const uri = CommitPreviewItem.buildURI(workdir); + const item = await atomEnv.workspace.open(uri); + + assert.strictEqual(item.getTitle(), 'Commit preview'); + assert.lengthOf(wrapper.update().find('CommitPreviewItem'), 
1); + }); + + it('registers a command to toggle the commit preview item', async function() { + const workdir = await cloneRepository('three-files'); + const repository = await buildRepository(workdir); + const wrapper = mount(React.cloneElement(app, {repository})); + assert.isFalse(wrapper.find('CommitPreviewItem').exists()); + + atomEnv.commands.dispatch(workspace.getElement(), 'github:toggle-commit-preview'); + + assert.lengthOf(wrapper.update().find('CommitPreviewItem'), 1); + }); + }); + describe('context commands trigger event reporting', function() { let wrapper; @@ -1132,4 +1176,28 @@ describe('RootController', function() { assert.isFalse(reporterProxy.addEvent.called); }); }); + + describe('surfaceToCommitPreviewButton', function() { + it('focuses and selects the commit preview button', async function() { + const repository = await buildRepository(await cloneRepository('multiple-commits')); + app = React.cloneElement(app, { + repository, + startOpen: true, + startRevealed: true, + }); + const wrapper = mount(app); + + const gitTabTracker = wrapper.instance().gitTabTracker; + + const gitTab = { + focusAndSelectCommitPreviewButton: sinon.spy(), + }; + + sinon.stub(gitTabTracker, 'getComponent').returns(gitTab); + + wrapper.instance().surfaceToCommitPreviewButton(); + assert.isTrue(gitTab.focusAndSelectCommitPreviewButton.called); + }); + }); + }); diff --git a/test/git-strategies.test.js b/test/git-strategies.test.js index ccd3b6a8061..ee74e7c3f27 100644 --- a/test/git-strategies.test.js +++ b/test/git-strategies.test.js @@ -661,6 +661,32 @@ import * as reporterProxy from '../lib/reporter-proxy'; }); }); + describe('getStagedChangesPatch', function() { + it('returns an empty patch if there are no staged files', async function() { + const workdir = await cloneRepository('three-files'); + const git = createTestStrategy(workdir); + const mp = await git.getStagedChangesPatch(); + assert.lengthOf(mp, 0); + }); + + it('returns a combined diff of all staged files', 
async function() { + const workdir = await cloneRepository('each-staging-group'); + const git = createTestStrategy(workdir); + + await assert.isRejected(git.merge('origin/branch')); + await fs.writeFile(path.join(workdir, 'unstaged-1.txt'), 'Unstaged file'); + await fs.writeFile(path.join(workdir, 'unstaged-2.txt'), 'Unstaged file'); + + await fs.writeFile(path.join(workdir, 'staged-1.txt'), 'Staged file'); + await fs.writeFile(path.join(workdir, 'staged-2.txt'), 'Staged file'); + await fs.writeFile(path.join(workdir, 'staged-3.txt'), 'Staged file'); + await git.stageFiles(['staged-1.txt', 'staged-2.txt', 'staged-3.txt']); + + const diffs = await git.getStagedChangesPatch(); + assert.deepEqual(diffs.map(diff => diff.newPath), ['staged-1.txt', 'staged-2.txt', 'staged-3.txt']); + }); + }); + describe('isMerging', function() { it('returns true if `.git/MERGE_HEAD` exists', async function() { const workingDirPath = await cloneRepository('merge-conflict'); @@ -1289,6 +1315,12 @@ import * as reporterProxy from '../lib/reporter-proxy'; const expectedFileMode = process.platform === 'win32' ? 
'100644' : '100755'; assert.equal(await git.getFileMode('new-file.txt'), expectedFileMode); }); + + it('returns the file mode for symlink file', async function() { + const workingDirPath = await cloneRepository('symlinks'); + const git = createTestStrategy(workingDirPath); + assert.equal(await git.getFileMode('symlink.txt'), 120000); + }); }); describe('merging files', function() { diff --git a/test/github-package.test.js b/test/github-package.test.js index 5d1c0fbf876..91678983d0e 100644 --- a/test/github-package.test.js +++ b/test/github-package.test.js @@ -702,4 +702,34 @@ describe('GithubPackage', function() { ); }); }); + + describe('stub item creation', function() { + beforeEach(function() { + sinon.spy(githubPackage, 'rerender'); + }); + + describe('before the initial render', function() { + it('creates a stub item for a commit preview item', function() { + const item = githubPackage.createCommitPreviewStub({uri: 'atom-github://commit-preview'}); + + assert.isFalse(githubPackage.rerender.called); + assert.strictEqual(item.getTitle(), 'Commit preview'); + assert.strictEqual(item.getURI(), 'atom-github://commit-preview'); + }); + }); + + describe('after the initial render', function() { + beforeEach(function() { + githubPackage.controller = Symbol('controller'); + }); + + it('creates a stub item for a commit preview item', function() { + const item = githubPackage.createCommitPreviewStub({uri: 'atom-github://commit-preview'}); + + assert.isTrue(githubPackage.rerender.called); + assert.strictEqual(item.getTitle(), 'Commit preview'); + assert.strictEqual(item.getURI(), 'atom-github://commit-preview'); + }); + }); + }); }); diff --git a/test/helpers.js b/test/helpers.js index 3a16307d373..ba2a7c98972 100644 --- a/test/helpers.js +++ b/test/helpers.js @@ -99,6 +99,7 @@ export async function setUpLocalAndRemoteRepositories(repoName = 'multiple-commi await localGit.exec(['config', '--local', 'commit.gpgsign', 'false']); await localGit.exec(['config', '--local', 
'user.email', FAKE_USER.email]); await localGit.exec(['config', '--local', 'user.name', FAKE_USER.name]); + await localGit.exec(['config', '--local', 'pull.rebase', false]); return {baseRepoPath, remoteRepoPath, localRepoPath}; } @@ -157,8 +158,9 @@ export function assertEqualSortedArraysByKey(arr1, arr2, key) { // Helpers for test/models/patch classes class PatchBufferAssertions { - constructor(patch) { + constructor(patch, buffer) { this.patch = patch; + this.buffer = buffer; } hunk(hunkIndex, {startRow, endRow, header, regions}) { @@ -175,7 +177,7 @@ class PatchBufferAssertions { const spec = regions[i]; assert.strictEqual(region.constructor.name.toLowerCase(), spec.kind); - assert.strictEqual(region.toStringIn(this.patch.getBuffer()), spec.string); + assert.strictEqual(region.toStringIn(this.buffer), spec.string); assert.deepEqual(region.getRange().serialize(), spec.range); } } @@ -188,12 +190,12 @@ class PatchBufferAssertions { } } -export function assertInPatch(patch) { - return new PatchBufferAssertions(patch); +export function assertInPatch(patch, buffer) { + return new PatchBufferAssertions(patch, buffer); } -export function assertInFilePatch(filePatch) { - return assertInPatch(filePatch.getPatch()); +export function assertInFilePatch(filePatch, buffer) { + return assertInPatch(filePatch.getPatch(), buffer); } let activeRenderers = []; diff --git a/test/integration/file-patch.test.js b/test/integration/file-patch.test.js index 84649d67b24..9b8c5fc59d2 100644 --- a/test/integration/file-patch.test.js +++ b/test/integration/file-patch.test.js @@ -75,15 +75,15 @@ describe('integration: file patches', function() { listItem.simulate('mousedown', {button: 0, persist() {}}); window.dispatchEvent(new MouseEvent('mouseup')); - const itemSelector = `FilePatchItem[relPath="${relativePath}"][stagingStatus="${stagingStatus}"]`; + const itemSelector = `ChangedFileItem[relPath="${relativePath}"][stagingStatus="${stagingStatus}"]`; await until( () => 
wrapper.update().find(itemSelector).find('.github-FilePatchView').exists(), - `the FilePatchItem for ${relativePath} arrives and loads`, + `the ChangedFileItem for ${relativePath} arrives and loads`, ); } function getPatchItem(stagingStatus, relativePath) { - return wrapper.update().find(`FilePatchItem[relPath="${relativePath}"][stagingStatus="${stagingStatus}"]`); + return wrapper.update().find(`ChangedFileItem[relPath="${relativePath}"][stagingStatus="${stagingStatus}"]`); } function getPatchEditor(stagingStatus, relativePath) { @@ -511,6 +511,12 @@ describe('integration: file patches', function() { getPatchEditor('unstaged', 'sample.js').selectAll(); getPatchItem('unstaged', 'sample.js').find('.github-HunkHeaderView-stageButton').simulate('click'); + await patchContent( + 'unstaged', 'sample.js', + [repoPath('target.txt'), 'selected'], + [' No newline at end of file'], + ); + assert.isTrue(getPatchItem('unstaged', 'sample.js').find('.github-FilePatchView-metaTitle').exists()); await clickFileInGitTab('staged', 'sample.js'); @@ -740,19 +746,24 @@ describe('integration: file patches', function() { [[2, 0], [2, 0]], [[10, 0], [10, 0]], ]); + getPatchItem('unstaged', 'sample.js').find('.github-HunkHeaderView-stageButton').simulate('click'); + // in the case of multiple selections, the next selection is calculated based on bottom most selection + // When the bottom most changed line in a diff is staged/unstaged, then the new bottom most changed + // line is selected. + // Essentially we want to keep the selection close to where it was, for ease of keyboard navigation. await patchContent( 'unstaged', 'sample.js', ['const quicksort = function() {'], [' const sort = function(items) {'], - [' let pivot = items.shift(), current, left = [], right = [];', 'deleted', 'selected'], + [' let pivot = items.shift(), current, left = [], right = [];', 'deleted'], [' while (items.length > 0) {'], [' current = items.shift();'], [' current < pivot ? 
left.push(current) : right.push(current);'], [' }'], [' return sort(left).concat(pivot).concat(sort(right));'], - [' // added 0', 'added'], + [' // added 0', 'added', 'selected'], [' // added 1'], [' };'], [''], @@ -861,14 +872,14 @@ describe('integration: file patches', function() { 'staged', 'sample.js', ['const quicksort = function() {'], [' const sort = function(items) {'], - [' if (items.length <= 1) { return items; }', 'deleted', 'selected'], + [' if (items.length <= 1) { return items; }', 'deleted'], [' let pivot = items.shift(), current, left = [], right = [];'], [' while (items.length > 0) {'], [' current = items.shift();'], [' current < pivot ? left.push(current) : right.push(current);'], [' }'], [' return sort(left).concat(pivot).concat(sort(right));'], - [' // added 0', 'added'], + [' // added 0', 'added', 'selected'], [' };'], [''], [' return sort(Array.apply(this, arguments));'], diff --git a/test/items/file-patch-item.test.js b/test/items/changed-file-item.test.js similarity index 87% rename from test/items/file-patch-item.test.js rename to test/items/changed-file-item.test.js index 00467f1d2fc..59f8d131011 100644 --- a/test/items/file-patch-item.test.js +++ b/test/items/changed-file-item.test.js @@ -3,11 +3,11 @@ import React from 'react'; import {mount} from 'enzyme'; import PaneItem from '../../lib/atom/pane-item'; -import FilePatchItem from '../../lib/items/file-patch-item'; +import ChangedFileItem from '../../lib/items/changed-file-item'; import WorkdirContextPool from '../../lib/models/workdir-context-pool'; import {cloneRepository} from '../helpers'; -describe('FilePatchItem', function() { +describe('ChangedFileItem', function() { let atomEnv, repository, pool; beforeEach(async function() { @@ -41,10 +41,10 @@ describe('FilePatchItem', function() { }; return ( - + {({itemHolder, params}) => { return ( - {}, + undoLastDiscard: () => {}, + surfaceToCommitPreviewButton: () => {}, + ...override, + }; + + return ( + + {({itemHolder, params}) => { + 
return ( + + ); + }} + + ); + } + + function open(wrapper, options = {}) { + const opts = { + workingDirectory: repository.getWorkingDirectoryPath(), + ...options, + }; + const uri = CommitPreviewItem.buildURI(opts.workingDirectory); + return atomEnv.workspace.open(uri); + } + + it('constructs and opens the correct URI', async function() { + const wrapper = mount(buildPaneApp()); + await open(wrapper); + + assert.isTrue(wrapper.update().find('CommitPreviewItem').exists()); + }); + + it('passes extra props to its container', async function() { + const extra = Symbol('extra'); + const wrapper = mount(buildPaneApp({extra})); + await open(wrapper); + + assert.strictEqual(wrapper.update().find('CommitPreviewContainer').prop('extra'), extra); + }); + + it('locates the repository from the context pool', async function() { + const wrapper = mount(buildPaneApp()); + await open(wrapper); + + assert.strictEqual(wrapper.update().find('CommitPreviewContainer').prop('repository'), repository); + }); + + it('passes an absent repository if the working directory is unrecognized', async function() { + const wrapper = mount(buildPaneApp()); + await open(wrapper, {workingDirectory: '/nah'}); + + assert.isTrue(wrapper.update().find('CommitPreviewContainer').prop('repository').isAbsent()); + }); + + it('returns a fixed title and icon', async function() { + const wrapper = mount(buildPaneApp()); + const item = await open(wrapper); + + assert.strictEqual(item.getTitle(), 'Commit preview'); + assert.strictEqual(item.getIconName(), 'git-commit'); + }); + + it('terminates pending state', async function() { + const wrapper = mount(buildPaneApp()); + + const item = await open(wrapper); + const callback = sinon.spy(); + const sub = item.onDidTerminatePendingState(callback); + + assert.strictEqual(callback.callCount, 0); + item.terminatePendingState(); + assert.strictEqual(callback.callCount, 1); + item.terminatePendingState(); + assert.strictEqual(callback.callCount, 1); + + sub.dispose(); + 
}); + + it('may be destroyed once', async function() { + const wrapper = mount(buildPaneApp()); + + const item = await open(wrapper); + const callback = sinon.spy(); + const sub = item.onDidDestroy(callback); + + assert.strictEqual(callback.callCount, 0); + item.destroy(); + assert.strictEqual(callback.callCount, 1); + + sub.dispose(); + }); + + it('serializes itself as a CommitPreviewStub', async function() { + const wrapper = mount(buildPaneApp()); + const item0 = await open(wrapper, {workingDirectory: '/dir0'}); + assert.deepEqual(item0.serialize(), { + deserializer: 'CommitPreviewStub', + uri: 'atom-github://commit-preview?workdir=%2Fdir0', + }); + + const item1 = await open(wrapper, {workingDirectory: '/dir1'}); + assert.deepEqual(item1.serialize(), { + deserializer: 'CommitPreviewStub', + uri: 'atom-github://commit-preview?workdir=%2Fdir1', + }); + }); + + it('has an item-level accessor for the current working directory', async function() { + const wrapper = mount(buildPaneApp()); + const item = await open(wrapper, {workingDirectory: '/dir7'}); + assert.strictEqual(item.getWorkingDirectory(), '/dir7'); + }); + + describe('focus()', function() { + it('imperatively focuses the value of the initial focus ref', async function() { + const wrapper = mount(buildPaneApp()); + const item = await open(wrapper); + + const focusSpy = {focus: sinon.spy()}; + item.refInitialFocus.setter(focusSpy); + + item.focus(); + + assert.isTrue(focusSpy.focus.called); + }); + + it('is a no-op if there is no initial focus ref', async function() { + const wrapper = mount(buildPaneApp()); + const item = await open(wrapper); + + item.refInitialFocus.setter(null); + + item.focus(); + }); + }); +}); diff --git a/test/items/git-tab-item.test.js b/test/items/git-tab-item.test.js index 4fa74cfa5af..09be4becd05 100644 --- a/test/items/git-tab-item.test.js +++ b/test/items/git-tab-item.test.js @@ -52,4 +52,25 @@ describe('GitTabItem', function() { .find(item => item.getURI() === 
'atom-github://dock-item/git'); assert.strictEqual(paneItem.getTitle(), 'Git'); }); + + it('forwards imperative focus manipulation methods to its controller', async function() { + const wrapper = mount(buildApp()); + await atomEnv.workspace.open(GitTabItem.buildURI()); + await assert.async.isTrue(wrapper.update().find('GitTabController').exists()); + + const focusMethods = [ + 'focusAndSelectStagingItem', + 'focusAndSelectCommitPreviewButton', + ]; + + const spies = focusMethods.reduce((map, focusMethod) => { + map[focusMethod] = sinon.stub(wrapper.find('GitTabController').instance(), focusMethod); + return map; + }, {}); + + for (const method of focusMethods) { + wrapper.find('GitTabItem').instance()[method](); + assert.isTrue(spies[method].called); + } + }); }); diff --git a/test/models/patch/builder.test.js b/test/models/patch/builder.test.js index 6b2ce62401e..8e64488f05a 100644 --- a/test/models/patch/builder.test.js +++ b/test/models/patch/builder.test.js @@ -1,17 +1,19 @@ -import {buildFilePatch} from '../../../lib/models/patch'; -import {assertInPatch} from '../../helpers'; +import {buildFilePatch, buildMultiFilePatch} from '../../../lib/models/patch'; +import {assertInPatch, assertInFilePatch} from '../../helpers'; describe('buildFilePatch', function() { it('returns a null patch for an empty diff list', function() { - const p = buildFilePatch([]); - assert.isFalse(p.getOldFile().isPresent()); - assert.isFalse(p.getNewFile().isPresent()); - assert.isFalse(p.getPatch().isPresent()); + const multiFilePatch = buildFilePatch([]); + const [filePatch] = multiFilePatch.getFilePatches(); + + assert.isFalse(filePatch.getOldFile().isPresent()); + assert.isFalse(filePatch.getNewFile().isPresent()); + assert.isFalse(filePatch.getPatch().isPresent()); }); describe('with a single diff', function() { it('assembles a patch from non-symlink sides', function() { - const p = buildFilePatch([{ + const multiFilePatch = buildFilePatch([{ oldPath: 'old/path', oldMode: '100644', 
newPath: 'new/path', @@ -64,18 +66,22 @@ describe('buildFilePatch', function() { ], }]); + assert.lengthOf(multiFilePatch.getFilePatches(), 1); + const [p] = multiFilePatch.getFilePatches(); + const buffer = multiFilePatch.getBuffer(); + assert.strictEqual(p.getOldPath(), 'old/path'); assert.strictEqual(p.getOldMode(), '100644'); assert.strictEqual(p.getNewPath(), 'new/path'); assert.strictEqual(p.getNewMode(), '100755'); assert.strictEqual(p.getPatch().getStatus(), 'modified'); - const buffer = + const bufferText = 'line-0\nline-1\nline-2\nline-3\nline-4\nline-5\nline-6\nline-7\nline-8\nline-9\nline-10\n' + 'line-11\nline-12\nline-13\nline-14\nline-15\nline-16\nline-17\nline-18\n'; - assert.strictEqual(p.getBuffer().getText(), buffer); + assert.strictEqual(buffer.getText(), bufferText); - assertInPatch(p).hunks( + assertInPatch(p, buffer).hunks( { startRow: 0, endRow: 8, @@ -113,7 +119,7 @@ describe('buildFilePatch', function() { }); it("sets the old file's symlink destination", function() { - const p = buildFilePatch([{ + const multiFilePatch = buildFilePatch([{ oldPath: 'old/path', oldMode: '120000', newPath: 'new/path', @@ -130,12 +136,14 @@ describe('buildFilePatch', function() { ], }]); + assert.lengthOf(multiFilePatch.getFilePatches(), 1); + const [p] = multiFilePatch.getFilePatches(); assert.strictEqual(p.getOldSymlink(), 'old/destination'); assert.isNull(p.getNewSymlink()); }); it("sets the new file's symlink destination", function() { - const p = buildFilePatch([{ + const multiFilePatch = buildFilePatch([{ oldPath: 'old/path', oldMode: '100644', newPath: 'new/path', @@ -152,12 +160,14 @@ describe('buildFilePatch', function() { ], }]); + assert.lengthOf(multiFilePatch.getFilePatches(), 1); + const [p] = multiFilePatch.getFilePatches(); assert.isNull(p.getOldSymlink()); assert.strictEqual(p.getNewSymlink(), 'new/destination'); }); it("sets both files' symlink destinations", function() { - const p = buildFilePatch([{ + const multiFilePatch = 
buildFilePatch([{ oldPath: 'old/path', oldMode: '120000', newPath: 'new/path', @@ -178,12 +188,14 @@ describe('buildFilePatch', function() { ], }]); + assert.lengthOf(multiFilePatch.getFilePatches(), 1); + const [p] = multiFilePatch.getFilePatches(); assert.strictEqual(p.getOldSymlink(), 'old/destination'); assert.strictEqual(p.getNewSymlink(), 'new/destination'); }); it('assembles a patch from a file deletion', function() { - const p = buildFilePatch([{ + const multiFilePatch = buildFilePatch([{ oldPath: 'old/path', oldMode: '100644', newPath: null, @@ -206,16 +218,20 @@ describe('buildFilePatch', function() { ], }]); + assert.lengthOf(multiFilePatch.getFilePatches(), 1); + const [p] = multiFilePatch.getFilePatches(); + const buffer = multiFilePatch.getBuffer(); + assert.isTrue(p.getOldFile().isPresent()); assert.strictEqual(p.getOldPath(), 'old/path'); assert.strictEqual(p.getOldMode(), '100644'); assert.isFalse(p.getNewFile().isPresent()); assert.strictEqual(p.getPatch().getStatus(), 'deleted'); - const buffer = 'line-0\nline-1\nline-2\nline-3\n\n'; - assert.strictEqual(p.getBuffer().getText(), buffer); + const bufferText = 'line-0\nline-1\nline-2\nline-3\n\n'; + assert.strictEqual(buffer.getText(), bufferText); - assertInPatch(p).hunks( + assertInPatch(p, buffer).hunks( { startRow: 0, endRow: 4, @@ -228,7 +244,7 @@ describe('buildFilePatch', function() { }); it('assembles a patch from a file addition', function() { - const p = buildFilePatch([{ + const multiFilePatch = buildFilePatch([{ oldPath: null, oldMode: null, newPath: 'new/path', @@ -249,16 +265,20 @@ describe('buildFilePatch', function() { ], }]); + assert.lengthOf(multiFilePatch.getFilePatches(), 1); + const [p] = multiFilePatch.getFilePatches(); + const buffer = multiFilePatch.getBuffer(); + assert.isFalse(p.getOldFile().isPresent()); assert.isTrue(p.getNewFile().isPresent()); assert.strictEqual(p.getNewPath(), 'new/path'); assert.strictEqual(p.getNewMode(), '100755'); 
assert.strictEqual(p.getPatch().getStatus(), 'added'); - const buffer = 'line-0\nline-1\nline-2\n'; - assert.strictEqual(p.getBuffer().getText(), buffer); + const bufferText = 'line-0\nline-1\nline-2\n'; + assert.strictEqual(buffer.getText(), bufferText); - assertInPatch(p).hunks( + assertInPatch(p, buffer).hunks( { startRow: 0, endRow: 2, @@ -284,7 +304,7 @@ describe('buildFilePatch', function() { }); it('parses a no-newline marker', function() { - const p = buildFilePatch([{ + const multiFilePatch = buildFilePatch([{ oldPath: 'old/path', oldMode: '100644', newPath: 'new/path', @@ -295,9 +315,12 @@ describe('buildFilePatch', function() { ]}], }]); - assert.strictEqual(p.getBuffer().getText(), 'line-0\nline-1\n No newline at end of file\n'); + assert.lengthOf(multiFilePatch.getFilePatches(), 1); + const [p] = multiFilePatch.getFilePatches(); + const buffer = multiFilePatch.getBuffer(); + assert.strictEqual(buffer.getText(), 'line-0\nline-1\n No newline at end of file\n'); - assertInPatch(p).hunks({ + assertInPatch(p, buffer).hunks({ startRow: 0, endRow: 2, header: '@@ -0,1 +0,1 @@', @@ -312,7 +335,7 @@ describe('buildFilePatch', function() { describe('with a mode change and a content diff', function() { it('identifies a file that was deleted and replaced by a symlink', function() { - const p = buildFilePatch([ + const multiFilePatch = buildFilePatch([ { oldPath: 'the-path', oldMode: '000000', @@ -347,6 +370,10 @@ describe('buildFilePatch', function() { }, ]); + assert.lengthOf(multiFilePatch.getFilePatches(), 1); + const [p] = multiFilePatch.getFilePatches(); + const buffer = multiFilePatch.getBuffer(); + assert.strictEqual(p.getOldPath(), 'the-path'); assert.strictEqual(p.getOldMode(), '100644'); assert.isNull(p.getOldSymlink()); @@ -355,8 +382,8 @@ describe('buildFilePatch', function() { assert.strictEqual(p.getNewSymlink(), 'the-destination'); assert.strictEqual(p.getStatus(), 'deleted'); - assert.strictEqual(p.getBuffer().getText(), 'line-0\nline-1\n'); - 
assertInPatch(p).hunks({ + assert.strictEqual(buffer.getText(), 'line-0\nline-1\n'); + assertInPatch(p, buffer).hunks({ startRow: 0, endRow: 1, header: '@@ -0,0 +0,2 @@', @@ -367,7 +394,7 @@ describe('buildFilePatch', function() { }); it('identifies a symlink that was deleted and replaced by a file', function() { - const p = buildFilePatch([ + const multiFilePatch = buildFilePatch([ { oldPath: 'the-path', oldMode: '120000', @@ -402,6 +429,10 @@ describe('buildFilePatch', function() { }, ]); + assert.lengthOf(multiFilePatch.getFilePatches(), 1); + const [p] = multiFilePatch.getFilePatches(); + const buffer = multiFilePatch.getBuffer(); + assert.strictEqual(p.getOldPath(), 'the-path'); assert.strictEqual(p.getOldMode(), '120000'); assert.strictEqual(p.getOldSymlink(), 'the-destination'); @@ -410,8 +441,8 @@ describe('buildFilePatch', function() { assert.isNull(p.getNewSymlink()); assert.strictEqual(p.getStatus(), 'added'); - assert.strictEqual(p.getBuffer().getText(), 'line-0\nline-1\n'); - assertInPatch(p).hunks({ + assert.strictEqual(buffer.getText(), 'line-0\nline-1\n'); + assertInPatch(p, buffer).hunks({ startRow: 0, endRow: 1, header: '@@ -0,2 +0,0 @@', @@ -422,7 +453,7 @@ describe('buildFilePatch', function() { }); it('is indifferent to the order of the diffs', function() { - const p = buildFilePatch([ + const multiFilePatch = buildFilePatch([ { oldMode: '100644', newPath: 'the-path', @@ -456,6 +487,10 @@ describe('buildFilePatch', function() { }, ]); + assert.lengthOf(multiFilePatch.getFilePatches(), 1); + const [p] = multiFilePatch.getFilePatches(); + const buffer = multiFilePatch.getBuffer(); + assert.strictEqual(p.getOldPath(), 'the-path'); assert.strictEqual(p.getOldMode(), '100644'); assert.isNull(p.getOldSymlink()); @@ -464,8 +499,8 @@ describe('buildFilePatch', function() { assert.strictEqual(p.getNewSymlink(), 'the-destination'); assert.strictEqual(p.getStatus(), 'deleted'); - assert.strictEqual(p.getBuffer().getText(), 'line-0\nline-1\n'); - 
assertInPatch(p).hunks({ + assert.strictEqual(buffer.getText(), 'line-0\nline-1\n'); + assertInPatch(p, buffer).hunks({ startRow: 0, endRow: 1, header: '@@ -0,0 +0,2 @@', @@ -501,6 +536,270 @@ describe('buildFilePatch', function() { }); }); + describe('with multiple diffs', function() { + it('creates a MultiFilePatch containing each', function() { + const mp = buildMultiFilePatch([ + { + oldPath: 'first', oldMode: '100644', newPath: 'first', newMode: '100755', status: 'modified', + hunks: [ + { + oldStartLine: 1, oldLineCount: 2, newStartLine: 1, newLineCount: 4, + lines: [ + ' line-0', + '+line-1', + '+line-2', + ' line-3', + ], + }, + { + oldStartLine: 10, oldLineCount: 3, newStartLine: 12, newLineCount: 2, + lines: [ + ' line-4', + '-line-5', + ' line-6', + ], + }, + ], + }, + { + oldPath: 'second', oldMode: '100644', newPath: 'second', newMode: '100644', status: 'modified', + hunks: [ + { + oldStartLine: 5, oldLineCount: 3, newStartLine: 5, newLineCount: 3, + lines: [ + ' line-5', + '+line-6', + '-line-7', + ' line-8', + ], + }, + ], + }, + { + oldPath: 'third', oldMode: '100755', newPath: 'third', newMode: '100755', status: 'added', + hunks: [ + { + oldStartLine: 1, oldLineCount: 0, newStartLine: 1, newLineCount: 3, + lines: [ + '+line-0', + '+line-1', + '+line-2', + ], + }, + ], + }, + ]); + + const buffer = mp.getBuffer(); + + assert.lengthOf(mp.getFilePatches(), 3); + + assert.strictEqual( + mp.getBuffer().getText(), + 'line-0\nline-1\nline-2\nline-3\nline-4\nline-5\nline-6\n' + + 'line-5\nline-6\nline-7\nline-8\n' + + 'line-0\nline-1\nline-2\n', + ); + + assert.strictEqual(mp.getFilePatches()[0].getOldPath(), 'first'); + assert.deepEqual(mp.getFilePatches()[0].getMarker().getRange().serialize(), [[0, 0], [6, 6]]); + assertInFilePatch(mp.getFilePatches()[0], buffer).hunks( + { + startRow: 0, endRow: 3, header: '@@ -1,2 +1,4 @@', regions: [ + {kind: 'unchanged', string: ' line-0\n', range: [[0, 0], [0, 6]]}, + {kind: 'addition', string: '+line-1\n+line-2\n', 
range: [[1, 0], [2, 6]]}, + {kind: 'unchanged', string: ' line-3\n', range: [[3, 0], [3, 6]]}, + ], + }, + { + startRow: 4, endRow: 6, header: '@@ -10,3 +12,2 @@', regions: [ + {kind: 'unchanged', string: ' line-4\n', range: [[4, 0], [4, 6]]}, + {kind: 'deletion', string: '-line-5\n', range: [[5, 0], [5, 6]]}, + {kind: 'unchanged', string: ' line-6\n', range: [[6, 0], [6, 6]]}, + ], + }, + ); + assert.strictEqual(mp.getFilePatches()[1].getOldPath(), 'second'); + assert.deepEqual(mp.getFilePatches()[1].getMarker().getRange().serialize(), [[7, 0], [10, 6]]); + assertInFilePatch(mp.getFilePatches()[1], buffer).hunks( + { + startRow: 7, endRow: 10, header: '@@ -5,3 +5,3 @@', regions: [ + {kind: 'unchanged', string: ' line-5\n', range: [[7, 0], [7, 6]]}, + {kind: 'addition', string: '+line-6\n', range: [[8, 0], [8, 6]]}, + {kind: 'deletion', string: '-line-7\n', range: [[9, 0], [9, 6]]}, + {kind: 'unchanged', string: ' line-8\n', range: [[10, 0], [10, 6]]}, + ], + }, + ); + assert.strictEqual(mp.getFilePatches()[2].getOldPath(), 'third'); + assert.deepEqual(mp.getFilePatches()[2].getMarker().getRange().serialize(), [[11, 0], [13, 6]]); + assertInFilePatch(mp.getFilePatches()[2], buffer).hunks( + { + startRow: 11, endRow: 13, header: '@@ -1,0 +1,3 @@', regions: [ + {kind: 'addition', string: '+line-0\n+line-1\n+line-2\n', range: [[11, 0], [13, 6]]}, + ], + }, + ); + }); + + it('identifies mode and content change pairs within the patch list', function() { + const mp = buildMultiFilePatch([ + { + oldPath: 'first', oldMode: '100644', newPath: 'first', newMode: '100755', status: 'modified', + hunks: [ + { + oldStartLine: 1, oldLineCount: 2, newStartLine: 1, newLineCount: 3, + lines: [ + ' line-0', + '+line-1', + ' line-2', + ], + }, + ], + }, + { + oldPath: 'was-non-symlink', oldMode: '100644', newPath: 'was-non-symlink', newMode: '000000', status: 'deleted', + hunks: [ + { + oldStartLine: 1, oldLineCount: 2, newStartLine: 1, newLineCount: 0, + lines: ['-line-0', '-line-1'], 
+ }, + ], + }, + { + oldPath: 'was-symlink', oldMode: '000000', newPath: 'was-symlink', newMode: '100755', status: 'added', + hunks: [ + { + oldStartLine: 1, oldLineCount: 0, newStartLine: 1, newLineCount: 2, + lines: ['+line-0', '+line-1'], + }, + ], + }, + { + oldMode: '100644', newPath: 'third', newMode: '100644', status: 'deleted', + hunks: [ + { + oldStartLine: 1, oldLineCount: 3, newStartLine: 1, newLineCount: 0, + lines: ['-line-0', '-line-1', '-line-2'], + }, + ], + }, + { + oldPath: 'was-symlink', oldMode: '120000', newPath: 'was-non-symlink', newMode: '000000', status: 'deleted', + hunks: [ + { + oldStartLine: 1, oldLineCount: 0, newStartLine: 0, newLineCount: 0, + lines: ['-was-symlink-destination'], + }, + ], + }, + { + oldPath: 'was-non-symlink', oldMode: '000000', newPath: 'was-non-symlink', newMode: '120000', status: 'added', + hunks: [ + { + oldStartLine: 1, oldLineCount: 0, newStartLine: 1, newLineCount: 1, + lines: ['+was-non-symlink-destination'], + }, + ], + }, + ]); + + const buffer = mp.getBuffer(); + + assert.lengthOf(mp.getFilePatches(), 4); + const [fp0, fp1, fp2, fp3] = mp.getFilePatches(); + + assert.strictEqual(fp0.getOldPath(), 'first'); + assertInFilePatch(fp0, buffer).hunks({ + startRow: 0, endRow: 2, header: '@@ -1,2 +1,3 @@', regions: [ + {kind: 'unchanged', string: ' line-0\n', range: [[0, 0], [0, 6]]}, + {kind: 'addition', string: '+line-1\n', range: [[1, 0], [1, 6]]}, + {kind: 'unchanged', string: ' line-2\n', range: [[2, 0], [2, 6]]}, + ], + }); + + assert.strictEqual(fp1.getOldPath(), 'was-non-symlink'); + assert.isTrue(fp1.hasTypechange()); + assert.strictEqual(fp1.getNewSymlink(), 'was-non-symlink-destination'); + assertInFilePatch(fp1, buffer).hunks({ + startRow: 3, endRow: 4, header: '@@ -1,2 +1,0 @@', regions: [ + {kind: 'deletion', string: '-line-0\n-line-1\n', range: [[3, 0], [4, 6]]}, + ], + }); + + assert.strictEqual(fp2.getOldPath(), 'was-symlink'); + assert.isTrue(fp2.hasTypechange()); + 
assert.strictEqual(fp2.getOldSymlink(), 'was-symlink-destination'); + assertInFilePatch(fp2, buffer).hunks({ + startRow: 5, endRow: 6, header: '@@ -1,0 +1,2 @@', regions: [ + {kind: 'addition', string: '+line-0\n+line-1\n', range: [[5, 0], [6, 6]]}, + ], + }); + + assert.strictEqual(fp3.getNewPath(), 'third'); + assertInFilePatch(fp3, buffer).hunks({ + startRow: 7, endRow: 9, header: '@@ -1,3 +1,0 @@', regions: [ + {kind: 'deletion', string: '-line-0\n-line-1\n-line-2\n', range: [[7, 0], [9, 6]]}, + ], + }); + }); + + it('sets the correct marker range for diffs with no hunks', function() { + const mp = buildMultiFilePatch([ + { + oldPath: 'first', oldMode: '100644', newPath: 'first', newMode: '100755', status: 'modified', + hunks: [ + { + oldStartLine: 1, oldLineCount: 2, newStartLine: 1, newLineCount: 4, + lines: [ + ' line-0', + '+line-1', + '+line-2', + ' line-3', + ], + }, + { + oldStartLine: 10, oldLineCount: 3, newStartLine: 12, newLineCount: 2, + lines: [ + ' line-4', + '-line-5', + ' line-6', + ], + }, + ], + }, + { + oldPath: 'second', oldMode: '100644', newPath: 'second', newMode: '100755', status: 'modified', + hunks: [], + }, + { + oldPath: 'third', oldMode: '100755', newPath: 'third', newMode: '100755', status: 'added', + hunks: [ + { + oldStartLine: 5, oldLineCount: 3, newStartLine: 5, newLineCount: 3, + lines: [ + ' line-5', + '+line-6', + '-line-7', + ' line-8', + ], + }, + ], + }, + ]); + + assert.strictEqual(mp.getFilePatches()[0].getOldPath(), 'first'); + assert.deepEqual(mp.getFilePatches()[0].getMarker().getRange().serialize(), [[0, 0], [6, 6]]); + + assert.strictEqual(mp.getFilePatches()[1].getOldPath(), 'second'); + assert.deepEqual(mp.getFilePatches()[1].getHunks(), []); + assert.deepEqual(mp.getFilePatches()[1].getMarker().getRange().serialize(), [[7, 0], [7, 0]]); + + assert.strictEqual(mp.getFilePatches()[2].getOldPath(), 'third'); + assert.deepEqual(mp.getFilePatches()[2].getMarker().getRange().serialize(), [[7, 0], [10, 6]]); + }); + 
}); + it('throws an error with an unexpected number of diffs', function() { assert.throws(() => buildFilePatch([1, 2, 3]), /Unexpected number of diffs: 3/); }); diff --git a/test/models/patch/file-patch.test.js b/test/models/patch/file-patch.test.js index 43b7401b7bd..abc0b5551d9 100644 --- a/test/models/patch/file-patch.test.js +++ b/test/models/patch/file-patch.test.js @@ -21,9 +21,11 @@ describe('FilePatch', function() { ], }), ]; - const patch = new Patch({status: 'modified', hunks, buffer, layers}); + const marker = markRange(layers.patch); + const patch = new Patch({status: 'modified', hunks, marker}); const oldFile = new File({path: 'a.txt', mode: '120000', symlink: 'dest.txt'}); const newFile = new File({path: 'b.txt', mode: '100755'}); + const filePatch = new FilePatch(oldFile, newFile, patch); assert.isTrue(filePatch.isPresent()); @@ -36,29 +38,8 @@ describe('FilePatch', function() { assert.strictEqual(filePatch.getNewMode(), '100755'); assert.isUndefined(filePatch.getNewSymlink()); - assert.strictEqual(filePatch.getByteSize(), 15); - assert.strictEqual(filePatch.getBuffer().getText(), '0000\n0001\n0002\n'); + assert.strictEqual(filePatch.getMarker(), marker); assert.strictEqual(filePatch.getMaxLineNumberWidth(), 1); - - assert.strictEqual(filePatch.getHunkAt(1), hunks[0]); - - const nBuffer = new TextBuffer({text: '0001\n0002\n'}); - const nLayers = buildLayers(nBuffer); - const nHunks = [ - new Hunk({ - oldStartRow: 3, oldRowCount: 1, newStartRow: 3, newRowCount: 2, - marker: markRange(nLayers.hunk, 0, 1), - regions: [ - new Unchanged(markRange(nLayers.unchanged, 0)), - new Addition(markRange(nLayers.addition, 1)), - ], - }), - ]; - const nPatch = new Patch({status: 'modified', hunks: nHunks, buffer: nBuffer, layers: nLayers}); - const nFilePatch = new FilePatch(oldFile, newFile, nPatch); - - const range = nFilePatch.getNextSelectionRange(filePatch, new Set([1])); - assert.deepEqual(range, [[1, 0], [1, Infinity]]); }); it('accesses a file path from 
either side of the patch', function() { @@ -74,157 +55,27 @@ describe('FilePatch', function() { assert.isNull(new FilePatch(nullFile, nullFile, patch).getPath()); }); - it('iterates addition and deletion ranges from all hunks', function() { - const buffer = new TextBuffer({text: '0000\n0001\n0002\n0003\n0004\n0005\n0006\n0007\n0008\n0009\n'}); + it('returns the starting range of the patch', function() { + const buffer = new TextBuffer({text: '0000\n0001\n0002\n0003\n'}); const layers = buildLayers(buffer); const hunks = [ new Hunk({ - oldStartRow: 1, oldRowCount: 0, newStartRow: 1, newRowCount: 0, - marker: markRange(layers.hunk, 0, 9), + oldStartRow: 2, oldRowCount: 1, newStartRow: 2, newRowCount: 3, + marker: markRange(layers.hunk, 1, 3), regions: [ - new Unchanged(markRange(layers.unchanged, 0)), - new Addition(markRange(layers.addition, 1)), - new Unchanged(markRange(layers.unchanged, 2)), - new Addition(markRange(layers.addition, 3)), - new Deletion(markRange(layers.deletion, 4)), - new Addition(markRange(layers.addition, 5, 6)), - new Deletion(markRange(layers.deletion, 7)), - new Addition(markRange(layers.addition, 8)), - new Unchanged(markRange(layers.unchanged, 9)), + new Unchanged(markRange(layers.unchanged, 1)), + new Addition(markRange(layers.addition, 2, 3)), ], }), ]; - const patch = new Patch({status: 'modified', hunks, buffer, layers}); + const marker = markRange(layers.patch, 1, 3); + const patch = new Patch({status: 'modified', hunks, buffer, layers, marker}); const oldFile = new File({path: 'a.txt', mode: '100644'}); const newFile = new File({path: 'a.txt', mode: '100644'}); - const filePatch = new FilePatch(oldFile, newFile, patch); - - const additionRanges = filePatch.getAdditionRanges(); - assert.deepEqual(additionRanges.map(range => range.serialize()), [ - [[1, 0], [1, 4]], - [[3, 0], [3, 4]], - [[5, 0], [6, 4]], - [[8, 0], [8, 4]], - ]); - - const deletionRanges = filePatch.getDeletionRanges(); - assert.deepEqual(deletionRanges.map(range => 
range.serialize()), [ - [[4, 0], [4, 4]], - [[7, 0], [7, 4]], - ]); - - const noNewlineRanges = filePatch.getNoNewlineRanges(); - assert.lengthOf(noNewlineRanges, 0); - }); - it('returns an empty nonewline range if no hunks are present', function() { - const buffer = new TextBuffer(); - const layers = buildLayers(buffer); - const patch = new Patch({status: 'modified', hunks: [], buffer, layers}); - const oldFile = new File({path: 'a.txt', mode: '100644'}); - const newFile = new File({path: 'a.txt', mode: '100644'}); const filePatch = new FilePatch(oldFile, newFile, patch); - assert.lengthOf(filePatch.getNoNewlineRanges(), 0); - }); - - it('returns a nonewline range if one is present', function() { - const buffer = new TextBuffer({text: '0000\n No newline at end of file\n'}); - const layers = buildLayers(buffer); - const hunks = [ - new Hunk({ - oldStartRow: 1, oldRowCount: 0, newStartRow: 1, newRowCount: 0, - marker: markRange(layers.hunk, 0, 1), - regions: [ - new Addition(markRange(layers.addition, 0)), - new NoNewline(markRange(layers.noNewline, 1)), - ], - }), - ]; - const patch = new Patch({status: 'modified', hunks, buffer, layers}); - const oldFile = new File({path: 'a.txt', mode: '100644'}); - const newFile = new File({path: 'a.txt', mode: '100644'}); - const filePatch = new FilePatch(oldFile, newFile, patch); - - const noNewlineRanges = filePatch.getNoNewlineRanges(); - assert.deepEqual(noNewlineRanges.map(range => range.serialize()), [ - [[1, 0], [1, 26]], - ]); - }); - - it('adopts a buffer and layers from a prior FilePatch', function() { - const oldFile = new File({path: 'a.txt', mode: '100755'}); - const newFile = new File({path: 'b.txt', mode: '100755'}); - - const prevBuffer = new TextBuffer({text: '0000\n0001\n0002\n'}); - const prevLayers = buildLayers(prevBuffer); - const prevHunks = [ - new Hunk({ - oldStartRow: 2, oldRowCount: 2, newStartRow: 2, newRowCount: 3, - marker: markRange(prevLayers.hunk, 0, 2), - regions: [ - new 
Unchanged(markRange(prevLayers.unchanged, 0)), - new Addition(markRange(prevLayers.addition, 1)), - new Unchanged(markRange(prevLayers.unchanged, 2)), - ], - }), - ]; - const prevPatch = new Patch({status: 'modified', hunks: prevHunks, buffer: prevBuffer, layers: prevLayers}); - const prevFilePatch = new FilePatch(oldFile, newFile, prevPatch); - - const nextBuffer = new TextBuffer({text: '0000\n0001\n0002\n0003\n0004\n No newline at end of file'}); - const nextLayers = buildLayers(nextBuffer); - const nextHunks = [ - new Hunk({ - oldStartRow: 2, oldRowCount: 2, newStartRow: 2, newRowCount: 3, - marker: markRange(nextLayers.hunk, 0, 2), - regions: [ - new Unchanged(markRange(nextLayers.unchanged, 0)), - new Addition(markRange(nextLayers.addition, 1)), - new Unchanged(markRange(nextLayers.unchanged, 2)), - ], - }), - new Hunk({ - oldStartRow: 10, oldRowCount: 2, newStartRow: 11, newRowCount: 1, - marker: markRange(nextLayers.hunk, 3, 5), - regions: [ - new Unchanged(markRange(nextLayers.unchanged, 3)), - new Deletion(markRange(nextLayers.deletion, 4)), - new NoNewline(markRange(nextLayers.noNewline, 5)), - ], - }), - ]; - const nextPatch = new Patch({status: 'modified', hunks: nextHunks, buffer: nextBuffer, layers: nextLayers}); - const nextFilePatch = new FilePatch(oldFile, newFile, nextPatch); - - nextFilePatch.adoptBufferFrom(prevFilePatch); - - assert.strictEqual(nextFilePatch.getBuffer(), prevBuffer); - assert.strictEqual(nextFilePatch.getHunkLayer(), prevLayers.hunk); - assert.strictEqual(nextFilePatch.getUnchangedLayer(), prevLayers.unchanged); - assert.strictEqual(nextFilePatch.getAdditionLayer(), prevLayers.addition); - assert.strictEqual(nextFilePatch.getDeletionLayer(), prevLayers.deletion); - assert.strictEqual(nextFilePatch.getNoNewlineLayer(), prevLayers.noNewline); - - const rangesFrom = layer => layer.getMarkers().map(marker => marker.getRange().serialize()); - assert.deepEqual(rangesFrom(nextFilePatch.getHunkLayer()), [ - [[0, 0], [2, 4]], - [[3, 0], 
[5, 26]], - ]); - assert.deepEqual(rangesFrom(nextFilePatch.getUnchangedLayer()), [ - [[0, 0], [0, 4]], - [[2, 0], [2, 4]], - [[3, 0], [3, 4]], - ]); - assert.deepEqual(rangesFrom(nextFilePatch.getAdditionLayer()), [ - [[1, 0], [1, 4]], - ]); - assert.deepEqual(rangesFrom(nextFilePatch.getDeletionLayer()), [ - [[4, 0], [4, 4]], - ]); - assert.deepEqual(rangesFrom(nextFilePatch.getNoNewlineLayer()), [ - [[5, 0], [5, 26]], - ]); + assert.deepEqual(filePatch.getStartRange().serialize(), [[1, 0], [1, 0]]); }); describe('file-level change detection', function() { @@ -318,7 +169,15 @@ describe('FilePatch', function() { assert.strictEqual(clone3.getPatch(), patch1); }); - describe('getStagePatchForLines()', function() { + describe('buildStagePatchForLines()', function() { + let stagedLayeredBuffer; + + beforeEach(function() { + const buffer = new TextBuffer(); + const layers = buildLayers(buffer); + stagedLayeredBuffer = {buffer, layers}; + }); + it('returns a new FilePatch that applies only the selected lines', function() { const buffer = new TextBuffer({text: '0000\n0001\n0002\n0003\n0004\n'}); const layers = buildLayers(buffer); @@ -334,17 +193,18 @@ describe('FilePatch', function() { ], }), ]; - const patch = new Patch({status: 'modified', hunks, buffer, layers}); + const marker = markRange(layers.patch, 0, 4); + const patch = new Patch({status: 'modified', hunks, marker}); const oldFile = new File({path: 'file.txt', mode: '100644'}); const newFile = new File({path: 'file.txt', mode: '100644'}); const filePatch = new FilePatch(oldFile, newFile, patch); - const stagedPatch = filePatch.getStagePatchForLines(new Set([1, 3])); + const stagedPatch = filePatch.buildStagePatchForLines(buffer, stagedLayeredBuffer, new Set([1, 3])); assert.strictEqual(stagedPatch.getStatus(), 'modified'); assert.strictEqual(stagedPatch.getOldFile(), oldFile); assert.strictEqual(stagedPatch.getNewFile(), newFile); - assert.strictEqual(stagedPatch.getBuffer().getText(), 
'0000\n0001\n0003\n0004\n'); - assertInFilePatch(stagedPatch).hunks( + assert.strictEqual(stagedLayeredBuffer.buffer.getText(), '0000\n0001\n0003\n0004\n'); + assertInFilePatch(stagedPatch, stagedLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 3, @@ -360,10 +220,11 @@ describe('FilePatch', function() { }); describe('staging lines from deleted files', function() { + let buffer; let oldFile, deletionPatch; beforeEach(function() { - const buffer = new TextBuffer({text: '0000\n0001\n0002\n'}); + buffer = new TextBuffer({text: '0000\n0001\n0002\n'}); const layers = buildLayers(buffer); const hunks = [ new Hunk({ @@ -374,19 +235,20 @@ describe('FilePatch', function() { ], }), ]; - const patch = new Patch({status: 'deleted', hunks, buffer, layers}); + const marker = markRange(layers.patch, 0, 2); + const patch = new Patch({status: 'deleted', hunks, marker}); oldFile = new File({path: 'file.txt', mode: '100644'}); deletionPatch = new FilePatch(oldFile, nullFile, patch); }); it('handles staging part of the file', function() { - const stagedPatch = deletionPatch.getStagePatchForLines(new Set([1, 2])); + const stagedPatch = deletionPatch.buildStagePatchForLines(buffer, stagedLayeredBuffer, new Set([1, 2])); assert.strictEqual(stagedPatch.getStatus(), 'modified'); assert.strictEqual(stagedPatch.getOldFile(), oldFile); assert.strictEqual(stagedPatch.getNewFile(), oldFile); - assert.strictEqual(stagedPatch.getBuffer().getText(), '0000\n0001\n0002\n'); - assertInFilePatch(stagedPatch).hunks( + assert.strictEqual(stagedLayeredBuffer.buffer.getText(), '0000\n0001\n0002\n'); + assertInFilePatch(stagedPatch, stagedLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 2, @@ -400,12 +262,12 @@ describe('FilePatch', function() { }); it('handles staging all lines, leaving nothing unstaged', function() { - const stagedPatch = deletionPatch.getStagePatchForLines(new Set([1, 2, 3])); + const stagedPatch = deletionPatch.buildStagePatchForLines(buffer, stagedLayeredBuffer, new Set([0, 1, 
2])); assert.strictEqual(stagedPatch.getStatus(), 'deleted'); assert.strictEqual(stagedPatch.getOldFile(), oldFile); assert.isFalse(stagedPatch.getNewFile().isPresent()); - assert.strictEqual(stagedPatch.getBuffer().getText(), '0000\n0001\n0002\n'); - assertInFilePatch(stagedPatch).hunks( + assert.strictEqual(stagedLayeredBuffer.buffer.getText(), '0000\n0001\n0002\n'); + assertInFilePatch(stagedPatch, stagedLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 2, @@ -418,8 +280,8 @@ describe('FilePatch', function() { }); it('unsets the newFile when a symlink is created where a file was deleted', function() { - const buffer = new TextBuffer({text: '0000\n0001\n0002\n'}); - const layers = buildLayers(buffer); + const nBuffer = new TextBuffer({text: '0000\n0001\n0002\n'}); + const layers = buildLayers(nBuffer); const hunks = [ new Hunk({ oldStartRow: 1, oldRowCount: 3, newStartRow: 1, newRowCount: 0, @@ -429,65 +291,28 @@ describe('FilePatch', function() { ], }), ]; - const patch = new Patch({status: 'deleted', hunks, buffer, layers}); + const marker = markRange(layers.patch, 0, 2); + const patch = new Patch({status: 'deleted', hunks, marker}); oldFile = new File({path: 'file.txt', mode: '100644'}); const newFile = new File({path: 'file.txt', mode: '120000'}); const replacePatch = new FilePatch(oldFile, newFile, patch); - const stagedPatch = replacePatch.getStagePatchForLines(new Set([0, 1, 2])); + const stagedPatch = replacePatch.buildStagePatchForLines(nBuffer, stagedLayeredBuffer, new Set([0, 1, 2])); assert.strictEqual(stagedPatch.getOldFile(), oldFile); assert.isFalse(stagedPatch.getNewFile().isPresent()); }); }); }); - it('stages an entire hunk at once', function() { - const buffer = new TextBuffer({text: '0000\n0001\n0002\n0003\n0004\n0005\n'}); - const layers = buildLayers(buffer); - const hunks = [ - new Hunk({ - oldStartRow: 10, oldRowCount: 2, newStartRow: 10, newRowCount: 3, - marker: markRange(layers.hunk, 0, 2), - regions: [ - new 
Unchanged(markRange(layers.unchanged, 0)), - new Addition(markRange(layers.addition, 1)), - new Unchanged(markRange(layers.unchanged, 2)), - ], - }), - new Hunk({ - oldStartRow: 20, oldRowCount: 3, newStartRow: 19, newRowCount: 2, - marker: markRange(layers.hunk, 3, 5), - regions: [ - new Unchanged(markRange(layers.unchanged, 3)), - new Deletion(markRange(layers.deletion, 4)), - new Unchanged(markRange(layers.unchanged, 5)), - ], - }), - ]; - const patch = new Patch({status: 'modified', hunks, buffer, layers}); - const oldFile = new File({path: 'file.txt', mode: '100644'}); - const newFile = new File({path: 'file.txt', mode: '100644'}); - const filePatch = new FilePatch(oldFile, newFile, patch); + describe('getUnstagePatchForLines()', function() { + let unstageLayeredBuffer; - const stagedPatch = filePatch.getStagePatchForHunk(hunks[1]); - assert.strictEqual(stagedPatch.getBuffer().getText(), '0003\n0004\n0005\n'); - assert.strictEqual(stagedPatch.getOldFile(), oldFile); - assert.strictEqual(stagedPatch.getNewFile(), newFile); - assertInFilePatch(stagedPatch).hunks( - { - startRow: 0, - endRow: 2, - header: '@@ -20,3 +18,2 @@', - regions: [ - {kind: 'unchanged', string: ' 0003\n', range: [[0, 0], [0, 4]]}, - {kind: 'deletion', string: '-0004\n', range: [[1, 0], [1, 4]]}, - {kind: 'unchanged', string: ' 0005\n', range: [[2, 0], [2, 4]]}, - ], - }, - ); - }); + beforeEach(function() { + const buffer = new TextBuffer(); + const layers = buildLayers(buffer); + unstageLayeredBuffer = {buffer, layers}; + }); - describe('getUnstagePatchForLines()', function() { it('returns a new FilePatch that unstages only the specified lines', function() { const buffer = new TextBuffer({text: '0000\n0001\n0002\n0003\n0004\n'}); const layers = buildLayers(buffer); @@ -503,17 +328,18 @@ describe('FilePatch', function() { ], }), ]; - const patch = new Patch({status: 'modified', hunks, buffer, layers}); + const marker = markRange(layers.patch, 0, 4); + const patch = new Patch({status: 
'modified', hunks, marker}); const oldFile = new File({path: 'file.txt', mode: '100644'}); const newFile = new File({path: 'file.txt', mode: '100644'}); const filePatch = new FilePatch(oldFile, newFile, patch); - const unstagedPatch = filePatch.getUnstagePatchForLines(new Set([1, 3])); + const unstagedPatch = filePatch.buildUnstagePatchForLines(buffer, unstageLayeredBuffer, new Set([1, 3])); assert.strictEqual(unstagedPatch.getStatus(), 'modified'); assert.strictEqual(unstagedPatch.getOldFile(), newFile); assert.strictEqual(unstagedPatch.getNewFile(), newFile); - assert.strictEqual(unstagedPatch.getBuffer().getText(), '0000\n0001\n0002\n0003\n0004\n'); - assertInFilePatch(unstagedPatch).hunks( + assert.strictEqual(unstageLayeredBuffer.buffer.getText(), '0000\n0001\n0002\n0003\n0004\n'); + assertInFilePatch(unstagedPatch, unstageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 4, @@ -530,10 +356,11 @@ describe('FilePatch', function() { }); describe('unstaging lines from an added file', function() { + let buffer; let newFile, addedPatch, addedFilePatch; beforeEach(function() { - const buffer = new TextBuffer({text: '0000\n0001\n0002\n'}); + buffer = new TextBuffer({text: '0000\n0001\n0002\n'}); const layers = buildLayers(buffer); const hunks = [ new Hunk({ @@ -544,17 +371,18 @@ describe('FilePatch', function() { ], }), ]; + const marker = markRange(layers.patch, 0, 2); newFile = new File({path: 'file.txt', mode: '100644'}); - addedPatch = new Patch({status: 'added', hunks, buffer, layers}); + addedPatch = new Patch({status: 'added', hunks, marker}); addedFilePatch = new FilePatch(nullFile, newFile, addedPatch); }); it('handles unstaging part of the file', function() { - const unstagePatch = addedFilePatch.getUnstagePatchForLines(new Set([2])); + const unstagePatch = addedFilePatch.buildUnstagePatchForLines(buffer, unstageLayeredBuffer, new Set([2])); assert.strictEqual(unstagePatch.getStatus(), 'modified'); assert.strictEqual(unstagePatch.getOldFile(), newFile); 
assert.strictEqual(unstagePatch.getNewFile(), newFile); - assertInFilePatch(unstagePatch).hunks( + assertInFilePatch(unstagePatch, unstageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 2, @@ -568,11 +396,11 @@ describe('FilePatch', function() { }); it('handles unstaging all lines, leaving nothing staged', function() { - const unstagePatch = addedFilePatch.getUnstagePatchForLines(new Set([0, 1, 2])); + const unstagePatch = addedFilePatch.buildUnstagePatchForLines(buffer, unstageLayeredBuffer, new Set([0, 1, 2])); assert.strictEqual(unstagePatch.getStatus(), 'deleted'); assert.strictEqual(unstagePatch.getOldFile(), newFile); assert.isFalse(unstagePatch.getNewFile().isPresent()); - assertInFilePatch(unstagePatch).hunks( + assertInFilePatch(unstagePatch, unstageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 2, @@ -587,10 +415,10 @@ describe('FilePatch', function() { it('unsets the newFile when a symlink is deleted and a file is created in its place', function() { const oldSymlink = new File({path: 'file.txt', mode: '120000', symlink: 'wat.txt'}); const patch = new FilePatch(oldSymlink, newFile, addedPatch); - const unstagePatch = patch.getUnstagePatchForLines(new Set([0, 1, 2])); + const unstagePatch = patch.buildUnstagePatchForLines(buffer, unstageLayeredBuffer, new Set([0, 1, 2])); assert.strictEqual(unstagePatch.getOldFile(), newFile); assert.isFalse(unstagePatch.getNewFile().isPresent()); - assertInFilePatch(unstagePatch).hunks( + assertInFilePatch(unstagePatch, unstageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 2, @@ -604,10 +432,10 @@ describe('FilePatch', function() { }); describe('unstaging lines from a removed file', function() { - let oldFile, removedFilePatch; + let oldFile, removedFilePatch, buffer; beforeEach(function() { - const buffer = new TextBuffer({text: '0000\n0001\n0002\n'}); + buffer = new TextBuffer({text: '0000\n0001\n0002\n'}); const layers = buildLayers(buffer); const hunks = [ new Hunk({ @@ -619,16 +447,17 @@ 
describe('FilePatch', function() { }), ]; oldFile = new File({path: 'file.txt', mode: '100644'}); - const removedPatch = new Patch({status: 'deleted', hunks, buffer, layers}); + const marker = markRange(layers.patch, 0, 2); + const removedPatch = new Patch({status: 'deleted', hunks, marker}); removedFilePatch = new FilePatch(oldFile, nullFile, removedPatch); }); it('handles unstaging part of the file', function() { - const discardPatch = removedFilePatch.getUnstagePatchForLines(new Set([1])); + const discardPatch = removedFilePatch.buildUnstagePatchForLines(buffer, unstageLayeredBuffer, new Set([1])); assert.strictEqual(discardPatch.getStatus(), 'added'); assert.strictEqual(discardPatch.getOldFile(), nullFile); assert.strictEqual(discardPatch.getNewFile(), oldFile); - assertInFilePatch(discardPatch).hunks( + assertInFilePatch(discardPatch, unstageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 0, @@ -641,11 +470,15 @@ describe('FilePatch', function() { }); it('handles unstaging the entire file', function() { - const discardPatch = removedFilePatch.getUnstagePatchForLines(new Set([0, 1, 2])); + const discardPatch = removedFilePatch.buildUnstagePatchForLines( + buffer, + unstageLayeredBuffer, + new Set([0, 1, 2]), + ); assert.strictEqual(discardPatch.getStatus(), 'added'); assert.strictEqual(discardPatch.getOldFile(), nullFile); assert.strictEqual(discardPatch.getNewFile(), oldFile); - assertInFilePatch(discardPatch).hunks( + assertInFilePatch(discardPatch, unstageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 2, @@ -659,53 +492,7 @@ describe('FilePatch', function() { }); }); - it('unstages an entire hunk at once', function() { - const buffer = new TextBuffer({text: '0000\n0001\n0002\n0003\n0004\n0005\n'}); - const layers = buildLayers(buffer); - const hunks = [ - new Hunk({ - oldStartRow: 10, oldRowCount: 2, newStartRow: 10, newRowCount: 3, - marker: markRange(layers.hunk, 0, 2), - regions: [ - new Unchanged(markRange(layers.unchanged, 0)), - new 
Addition(markRange(layers.addition, 1)), - new Unchanged(markRange(layers.unchanged, 2)), - ], - }), - new Hunk({ - oldStartRow: 20, oldRowCount: 3, newStartRow: 19, newRowCount: 2, - marker: markRange(layers.hunk, 3, 5), - regions: [ - new Unchanged(markRange(layers.unchanged, 3)), - new Deletion(markRange(layers.deletion, 4)), - new Unchanged(markRange(layers.unchanged, 5)), - ], - }), - ]; - const patch = new Patch({status: 'modified', hunks, buffer, layers}); - const oldFile = new File({path: 'file.txt', mode: '100644'}); - const newFile = new File({path: 'file.txt', mode: '100644'}); - const filePatch = new FilePatch(oldFile, newFile, patch); - - const unstagedPatch = filePatch.getUnstagePatchForHunk(hunks[0]); - assert.strictEqual(unstagedPatch.getBuffer().getText(), '0000\n0001\n0002\n'); - assert.strictEqual(unstagedPatch.getOldFile(), newFile); - assert.strictEqual(unstagedPatch.getNewFile(), newFile); - assertInFilePatch(unstagedPatch).hunks( - { - startRow: 0, - endRow: 2, - header: '@@ -10,3 +10,2 @@', - regions: [ - {kind: 'unchanged', string: ' 0000\n', range: [[0, 0], [0, 4]]}, - {kind: 'deletion', string: '-0001\n', range: [[1, 0], [1, 4]]}, - {kind: 'unchanged', string: ' 0002\n', range: [[2, 0], [2, 4]]}, - ], - }, - ); - }); - - describe('toString()', function() { + describe('toStringIn()', function() { it('converts the patch to the standard textual format', function() { const buffer = new TextBuffer({text: '0000\n0001\n0002\n0003\n0004\n0005\n0006\n0007\n'}); const layers = buildLayers(buffer); @@ -730,7 +517,8 @@ describe('FilePatch', function() { ], }), ]; - const patch = new Patch({status: 'modified', hunks, buffer, layers}); + const marker = markRange(layers.patch, 0, 7); + const patch = new Patch({status: 'modified', hunks, marker}); const oldFile = new File({path: 'a.txt', mode: '100644'}); const newFile = new File({path: 'b.txt', mode: '100755'}); const filePatch = new FilePatch(oldFile, newFile, patch); @@ -749,7 +537,7 @@ 
describe('FilePatch', function() { ' 0005\n' + '+0006\n' + ' 0007\n'; - assert.strictEqual(filePatch.toString(), expectedString); + assert.strictEqual(filePatch.toStringIn(buffer), expectedString); }); it('correctly formats a file with no newline at the end', function() { @@ -766,7 +554,8 @@ describe('FilePatch', function() { ], }), ]; - const patch = new Patch({status: 'modified', hunks, buffer, layers}); + const marker = markRange(layers.patch, 0, 2); + const patch = new Patch({status: 'modified', hunks, marker}); const oldFile = new File({path: 'a.txt', mode: '100644'}); const newFile = new File({path: 'b.txt', mode: '100755'}); const filePatch = new FilePatch(oldFile, newFile, patch); @@ -779,7 +568,7 @@ describe('FilePatch', function() { ' 0000\n' + '+0001\n' + '\\ No newline at end of file\n'; - assert.strictEqual(filePatch.toString(), expectedString); + assert.strictEqual(filePatch.toStringIn(buffer), expectedString); }); describe('typechange file patches', function() { @@ -795,7 +584,8 @@ describe('FilePatch', function() { ], }), ]; - const patch = new Patch({status: 'added', hunks, buffer, layers}); + const marker = markRange(layers.patch, 0, 1); + const patch = new Patch({status: 'added', hunks, marker}); const oldFile = new File({path: 'a.txt', mode: '120000', symlink: 'dest.txt'}); const newFile = new File({path: 'a.txt', mode: '100644'}); const filePatch = new FilePatch(oldFile, newFile, patch); @@ -815,7 +605,7 @@ describe('FilePatch', function() { '@@ -1,0 +1,2 @@\n' + '+0000\n' + '+0001\n'; - assert.strictEqual(filePatch.toString(), expectedString); + assert.strictEqual(filePatch.toStringIn(buffer), expectedString); }); it('handles typechange patches for a file replaced with a symlink', function() { @@ -830,7 +620,8 @@ describe('FilePatch', function() { ], }), ]; - const patch = new Patch({status: 'deleted', hunks, buffer, layers}); + const marker = markRange(layers.patch, 0, 1); + const patch = new Patch({status: 'deleted', hunks, marker}); const 
oldFile = new File({path: 'a.txt', mode: '100644'}); const newFile = new File({path: 'a.txt', mode: '120000', symlink: 'dest.txt'}); const filePatch = new FilePatch(oldFile, newFile, patch); @@ -850,15 +641,12 @@ describe('FilePatch', function() { '@@ -0,0 +1 @@\n' + '+dest.txt\n' + '\\ No newline at end of file\n'; - assert.strictEqual(filePatch.toString(), expectedString); + assert.strictEqual(filePatch.toStringIn(buffer), expectedString); }); }); }); it('has a nullFilePatch that stubs all FilePatch methods', function() { - const buffer = new TextBuffer({text: '0\n1\n2\n3\n'}); - const marker = markRange(buffer, 0, 1); - const nullFilePatch = FilePatch.createNull(); assert.isFalse(nullFilePatch.isPresent()); @@ -871,32 +659,21 @@ describe('FilePatch', function() { assert.isNull(nullFilePatch.getNewMode()); assert.isNull(nullFilePatch.getOldSymlink()); assert.isNull(nullFilePatch.getNewSymlink()); - assert.strictEqual(nullFilePatch.getByteSize(), 0); - assert.strictEqual(nullFilePatch.getBuffer().getText(), ''); - assert.lengthOf(nullFilePatch.getAdditionRanges(), 0); - assert.lengthOf(nullFilePatch.getDeletionRanges(), 0); - assert.lengthOf(nullFilePatch.getNoNewlineRanges(), 0); - assert.lengthOf(nullFilePatch.getHunkLayer().getMarkers(), 0); - assert.lengthOf(nullFilePatch.getUnchangedLayer().getMarkers(), 0); - assert.lengthOf(nullFilePatch.getAdditionLayer().getMarkers(), 0); - assert.lengthOf(nullFilePatch.getDeletionLayer().getMarkers(), 0); - assert.lengthOf(nullFilePatch.getNoNewlineLayer().getMarkers(), 0); assert.isFalse(nullFilePatch.didChangeExecutableMode()); assert.isFalse(nullFilePatch.hasSymlink()); assert.isFalse(nullFilePatch.hasTypechange()); assert.isNull(nullFilePatch.getPath()); assert.isNull(nullFilePatch.getStatus()); assert.lengthOf(nullFilePatch.getHunks(), 0); - assert.isFalse(nullFilePatch.getStagePatchForLines(new Set([0])).isPresent()); - assert.isFalse(nullFilePatch.getStagePatchForHunk(new Hunk({regions: [], marker})).isPresent()); 
- assert.isFalse(nullFilePatch.getUnstagePatchForLines(new Set([0])).isPresent()); - assert.isFalse(nullFilePatch.getUnstagePatchForHunk(new Hunk({regions: [], marker})).isPresent()); - assert.strictEqual(nullFilePatch.toString(), ''); + assert.isFalse(nullFilePatch.buildStagePatchForLines(new Set([0])).isPresent()); + assert.isFalse(nullFilePatch.buildUnstagePatchForLines(new Set([0])).isPresent()); + assert.strictEqual(nullFilePatch.toStringIn(new TextBuffer()), ''); }); }); function buildLayers(buffer) { return { + patch: buffer.addMarkerLayer(), hunk: buffer.addMarkerLayer(), unchanged: buffer.addMarkerLayer(), addition: buffer.addMarkerLayer(), diff --git a/test/models/patch/hunk.test.js b/test/models/patch/hunk.test.js index 56d7c879d6e..4080f261fca 100644 --- a/test/models/patch/hunk.test.js +++ b/test/models/patch/hunk.test.js @@ -50,25 +50,6 @@ describe('Hunk', function() { assert.strictEqual(h.bufferRowCount(), 11); assert.lengthOf(h.getChanges(), 3); assert.lengthOf(h.getRegions(), 4); - assert.lengthOf(h.getAdditionRanges(), 1); - assert.lengthOf(h.getDeletionRanges(), 2); - assert.isNull(h.getNoNewlineRange()); - }); - - it('returns the range of a no-newline region', function() { - const h = new Hunk({ - ...attrs, - regions: [ - new Addition(buffer.markRange([[1, 0], [2, 4]])), - new Deletion(buffer.markRange([[4, 0], [5, 4]])), - new Unchanged(buffer.markRange([[6, 0], [9, 4]])), - new NoNewline(buffer.markRange([[10, 0], [10, 4]])), - ], - }); - - const nl = h.getNoNewlineRange(); - assert.isNotNull(nl); - assert.deepEqual(nl.serialize(), [[10, 0], [10, 4]]); }); it('generates a patch section header', function() { diff --git a/test/models/patch/multi-file-patch.test.js b/test/models/patch/multi-file-patch.test.js new file mode 100644 index 00000000000..123812b2c25 --- /dev/null +++ b/test/models/patch/multi-file-patch.test.js @@ -0,0 +1,697 @@ +import dedent from 'dedent-js'; + +import {multiFilePatchBuilder, filePatchBuilder} from 
'../../builder/patch'; + +import MultiFilePatch from '../../../lib/models/patch/multi-file-patch'; +import {assertInFilePatch} from '../../helpers'; + +describe('MultiFilePatch', function() { + it('creates an empty patch when constructed with no arguments', function() { + const empty = new MultiFilePatch({}); + assert.isFalse(empty.anyPresent()); + assert.lengthOf(empty.getFilePatches(), 0); + }); + + it('detects when it is not empty', function() { + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(filePatch => { + filePatch + .setOldFile(file => file.path('file-0.txt')) + .setNewFile(file => file.path('file-0.txt')); + }) + .build(); + + assert.isTrue(multiFilePatch.anyPresent()); + }); + + describe('clone', function() { + let original; + + beforeEach(function() { + original = multiFilePatchBuilder() + .addFilePatch() + .addFilePatch() + .build() + .multiFilePatch; + }); + + it('defaults to creating an exact copy', function() { + const dup = original.clone(); + + assert.strictEqual(dup.getBuffer(), original.getBuffer()); + assert.strictEqual(dup.getPatchLayer(), original.getPatchLayer()); + assert.strictEqual(dup.getHunkLayer(), original.getHunkLayer()); + assert.strictEqual(dup.getUnchangedLayer(), original.getUnchangedLayer()); + assert.strictEqual(dup.getAdditionLayer(), original.getAdditionLayer()); + assert.strictEqual(dup.getDeletionLayer(), original.getDeletionLayer()); + assert.strictEqual(dup.getNoNewlineLayer(), original.getNoNewlineLayer()); + assert.strictEqual(dup.getFilePatches(), original.getFilePatches()); + }); + + it('creates a copy with a new buffer and layer set', function() { + const {buffer, layers} = multiFilePatchBuilder().build(); + const dup = original.clone({buffer, layers}); + + assert.strictEqual(dup.getBuffer(), buffer); + assert.strictEqual(dup.getPatchLayer(), layers.patch); + assert.strictEqual(dup.getHunkLayer(), layers.hunk); + assert.strictEqual(dup.getUnchangedLayer(), layers.unchanged); + 
assert.strictEqual(dup.getAdditionLayer(), layers.addition); + assert.strictEqual(dup.getDeletionLayer(), layers.deletion); + assert.strictEqual(dup.getNoNewlineLayer(), layers.noNewline); + assert.strictEqual(dup.getFilePatches(), original.getFilePatches()); + }); + + it('creates a copy with a new set of file patches', function() { + const nfp = [ + filePatchBuilder().build().filePatch, + filePatchBuilder().build().filePatch, + ]; + + const dup = original.clone({filePatches: nfp}); + assert.strictEqual(dup.getBuffer(), original.getBuffer()); + assert.strictEqual(dup.getPatchLayer(), original.getPatchLayer()); + assert.strictEqual(dup.getHunkLayer(), original.getHunkLayer()); + assert.strictEqual(dup.getUnchangedLayer(), original.getUnchangedLayer()); + assert.strictEqual(dup.getAdditionLayer(), original.getAdditionLayer()); + assert.strictEqual(dup.getDeletionLayer(), original.getDeletionLayer()); + assert.strictEqual(dup.getNoNewlineLayer(), original.getNoNewlineLayer()); + assert.strictEqual(dup.getFilePatches(), nfp); + }); + }); + + it('has an accessor for its file patches', function() { + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(filePatch => filePatch.setOldFile(file => file.path('file-0.txt'))) + .addFilePatch(filePatch => filePatch.setOldFile(file => file.path('file-1.txt'))) + .build(); + + assert.lengthOf(multiFilePatch.getFilePatches(), 2); + const [fp0, fp1] = multiFilePatch.getFilePatches(); + assert.strictEqual(fp0.getOldPath(), 'file-0.txt'); + assert.strictEqual(fp1.getOldPath(), 'file-1.txt'); + }); + + describe('didAnyChangeExecutableMode()', function() { + it('detects when at least one patch contains an executable mode change', function() { + const {multiFilePatch: yes} = multiFilePatchBuilder() + .addFilePatch(filePatch => { + filePatch.setOldFile(file => file.path('file-0.txt')); + filePatch.setNewFile(file => file.path('file-0.txt').executable()); + }) + .build(); + assert.isTrue(yes.didAnyChangeExecutableMode()); + }); 
+ + it('detects when none of the patches contain an executable mode change', function() { + const {multiFilePatch: no} = multiFilePatchBuilder() + .addFilePatch(filePatch => filePatch.setOldFile(file => file.path('file-0.txt'))) + .addFilePatch(filePatch => filePatch.setOldFile(file => file.path('file-1.txt'))) + .build(); + assert.isFalse(no.didAnyChangeExecutableMode()); + }); + }); + + describe('anyHaveTypechange()', function() { + it('detects when at least one patch contains a symlink change', function() { + const {multiFilePatch: yes} = multiFilePatchBuilder() + .addFilePatch(filePatch => filePatch.setOldFile(file => file.path('file-0.txt'))) + .addFilePatch(filePatch => { + filePatch.setOldFile(file => file.path('file-0.txt')); + filePatch.setNewFile(file => file.path('file-0.txt').symlinkTo('somewhere.txt')); + }) + .build(); + assert.isTrue(yes.anyHaveTypechange()); + }); + + it('detects when none of its patches contain a symlink change', function() { + const {multiFilePatch: no} = multiFilePatchBuilder() + .addFilePatch(filePatch => filePatch.setOldFile(file => file.path('file-0.txt'))) + .addFilePatch(filePatch => filePatch.setOldFile(file => file.path('file-1.txt'))) + .build(); + assert.isFalse(no.anyHaveTypechange()); + }); + }); + + it('computes the maximum line number width of any hunk in any patch', function() { + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-0.txt')); + fp.addHunk(h => h.oldRow(10)); + fp.addHunk(h => h.oldRow(99)); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-1.txt')); + fp.addHunk(h => h.oldRow(5)); + fp.addHunk(h => h.oldRow(15)); + }) + .build(); + + assert.strictEqual(multiFilePatch.getMaxLineNumberWidth(), 3); + }); + + it('locates an individual FilePatch by marker lookup', function() { + const builder = multiFilePatchBuilder(); + for (let i = 0; i < 10; i++) { + builder.addFilePatch(fp => { + fp.setOldFile(f => f.path(`file-${i}.txt`)); + 
fp.addHunk(h => { + h.oldRow(1).unchanged('a', 'b').added('c').deleted('d').unchanged('e'); + }); + fp.addHunk(h => { + h.oldRow(10).unchanged('f').deleted('g', 'h', 'i').unchanged('j'); + }); + }); + } + const {multiFilePatch} = builder.build(); + const fps = multiFilePatch.getFilePatches(); + + assert.isUndefined(multiFilePatch.getFilePatchAt(-1)); + assert.strictEqual(multiFilePatch.getFilePatchAt(0), fps[0]); + assert.strictEqual(multiFilePatch.getFilePatchAt(9), fps[0]); + assert.strictEqual(multiFilePatch.getFilePatchAt(10), fps[1]); + assert.strictEqual(multiFilePatch.getFilePatchAt(99), fps[9]); + assert.isUndefined(multiFilePatch.getFilePatchAt(101)); + }); + + it('creates a set of all unique paths referenced by patches', function() { + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-0-before.txt')); + fp.setNewFile(f => f.path('file-0-after.txt')); + }) + .addFilePatch(fp => { + fp.status('added'); + fp.nullOldFile(); + fp.setNewFile(f => f.path('file-1.txt')); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-2.txt')); + fp.setNewFile(f => f.path('file-2.txt')); + }) + .build(); + + assert.sameMembers( + Array.from(multiFilePatch.getPathSet()), + ['file-0-before.txt', 'file-0-after.txt', 'file-1.txt', 'file-2.txt'], + ); + }); + + it('locates a Hunk by marker lookup', function() { + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.addHunk(h => h.oldRow(1).added('0', '1', '2', '3', '4')); + fp.addHunk(h => h.oldRow(10).deleted('5', '6', '7', '8', '9')); + }) + .addFilePatch(fp => { + fp.addHunk(h => h.oldRow(5).unchanged('10', '11').added('12').deleted('13')); + fp.addHunk(h => h.oldRow(20).unchanged('14').deleted('15')); + }) + .addFilePatch(fp => { + fp.status('deleted'); + fp.addHunk(h => h.oldRow(4).deleted('16', '17', '18', '19')); + }) + .build(); + + const [fp0, fp1, fp2] = multiFilePatch.getFilePatches(); + + 
assert.isUndefined(multiFilePatch.getHunkAt(-1)); + assert.strictEqual(multiFilePatch.getHunkAt(0), fp0.getHunks()[0]); + assert.strictEqual(multiFilePatch.getHunkAt(4), fp0.getHunks()[0]); + assert.strictEqual(multiFilePatch.getHunkAt(5), fp0.getHunks()[1]); + assert.strictEqual(multiFilePatch.getHunkAt(9), fp0.getHunks()[1]); + assert.strictEqual(multiFilePatch.getHunkAt(10), fp1.getHunks()[0]); + assert.strictEqual(multiFilePatch.getHunkAt(15), fp1.getHunks()[1]); + assert.strictEqual(multiFilePatch.getHunkAt(16), fp2.getHunks()[0]); + assert.strictEqual(multiFilePatch.getHunkAt(19), fp2.getHunks()[0]); + assert.isUndefined(multiFilePatch.getHunkAt(21)); + }); + + it('represents itself as an apply-ready string', function() { + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-0.txt')); + fp.addHunk(h => h.oldRow(1).unchanged('0;0;0').added('0;0;1').deleted('0;0;2').unchanged('0;0;3')); + fp.addHunk(h => h.oldRow(10).unchanged('0;1;0').added('0;1;1').deleted('0;1;2').unchanged('0;1;3')); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-1.txt')); + fp.addHunk(h => h.oldRow(1).unchanged('1;0;0').added('1;0;1').deleted('1;0;2').unchanged('1;0;3')); + fp.addHunk(h => h.oldRow(10).unchanged('1;1;0').added('1;1;1').deleted('1;1;2').unchanged('1;1;3')); + }) + .build(); + + assert.strictEqual(multiFilePatch.toString(), dedent` + diff --git a/file-0.txt b/file-0.txt + --- a/file-0.txt + +++ b/file-0.txt + @@ -1,3 +1,3 @@ + 0;0;0 + +0;0;1 + -0;0;2 + 0;0;3 + @@ -10,3 +10,3 @@ + 0;1;0 + +0;1;1 + -0;1;2 + 0;1;3 + diff --git a/file-1.txt b/file-1.txt + --- a/file-1.txt + +++ b/file-1.txt + @@ -1,3 +1,3 @@ + 1;0;0 + +1;0;1 + -1;0;2 + 1;0;3 + @@ -10,3 +10,3 @@ + 1;1;0 + +1;1;1 + -1;1;2 + 1;1;3 + + `); + }); + + it('adopts a buffer from a previous patch', function() { + const {multiFilePatch: lastMultiPatch, buffer: lastBuffer, layers: lastLayers} = multiFilePatchBuilder() + .addFilePatch(fp => { + 
fp.addHunk(h => h.unchanged('a0').added('a1').deleted('a2').unchanged('a3')); + }) + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('a4').deleted('a5').unchanged('a6')); + fp.addHunk(h => h.unchanged('a7').added('a8').unchanged('a9')); + }) + .addFilePatch(fp => { + fp.addHunk(h => h.oldRow(99).deleted('7').noNewline()); + }) + .build(); + + const {multiFilePatch: nextMultiPatch, buffer: nextBuffer, layers: nextLayers} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('b0', 'b1').added('b2').unchanged('b3', 'b4')); + }) + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('b5', 'b6').added('b7')); + }) + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('b8', 'b9').deleted('b10').unchanged('b11')); + fp.addHunk(h => h.oldRow(99).deleted('b12').noNewline()); + }) + .build(); + + assert.notStrictEqual(nextBuffer, lastBuffer); + assert.notStrictEqual(nextLayers, lastLayers); + + nextMultiPatch.adoptBufferFrom(lastMultiPatch); + + assert.strictEqual(nextMultiPatch.getBuffer(), lastBuffer); + assert.strictEqual(nextMultiPatch.getPatchLayer(), lastLayers.patch); + assert.strictEqual(nextMultiPatch.getHunkLayer(), lastLayers.hunk); + assert.strictEqual(nextMultiPatch.getUnchangedLayer(), lastLayers.unchanged); + assert.strictEqual(nextMultiPatch.getAdditionLayer(), lastLayers.addition); + assert.strictEqual(nextMultiPatch.getDeletionLayer(), lastLayers.deletion); + assert.strictEqual(nextMultiPatch.getNoNewlineLayer(), lastLayers.noNewline); + + assert.deepEqual(lastBuffer.getText(), dedent` + b0 + b1 + b2 + b3 + b4 + b5 + b6 + b7 + b8 + b9 + b10 + b11 + b12 + No newline at end of file + + `); + + const assertMarkedLayerRanges = (layer, ranges) => { + assert.deepEqual(layer.getMarkers().map(m => m.getRange().serialize()), ranges); + }; + + assertMarkedLayerRanges(lastLayers.patch, [ + [[0, 0], [4, 2]], [[5, 0], [7, 2]], [[8, 0], [13, 26]], + ]); + assertMarkedLayerRanges(lastLayers.hunk, [ + [[0, 0], [4, 2]], [[5, 0], [7, 2]], [[8, 
0], [11, 3]], [[12, 0], [13, 26]], + ]); + assertMarkedLayerRanges(lastLayers.unchanged, [ + [[0, 0], [1, 2]], [[3, 0], [4, 2]], [[5, 0], [6, 2]], [[8, 0], [9, 2]], [[11, 0], [11, 3]], + ]); + assertMarkedLayerRanges(lastLayers.addition, [ + [[2, 0], [2, 2]], [[7, 0], [7, 2]], + ]); + assertMarkedLayerRanges(lastLayers.deletion, [ + [[10, 0], [10, 3]], [[12, 0], [12, 3]], + ]); + assertMarkedLayerRanges(lastLayers.noNewline, [ + [[13, 0], [13, 26]], + ]); + }); + + describe('derived patch generation', function() { + let multiFilePatch, rowSet; + + beforeEach(function() { + // The row content pattern here is: ${fileno};${hunkno};${lineno}, with a (**) if it's selected + multiFilePatch = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-0.txt')); + fp.addHunk(h => h.oldRow(1).unchanged('0;0;0').added('0;0;1').deleted('0;0;2').unchanged('0;0;3')); + fp.addHunk(h => h.oldRow(10).unchanged('0;1;0').added('0;1;1').deleted('0;1;2').unchanged('0;1;3')); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-1.txt')); + fp.addHunk(h => h.oldRow(1).unchanged('1;0;0').added('1;0;1 (**)').deleted('1;0;2').unchanged('1;0;3')); + fp.addHunk(h => h.oldRow(10).unchanged('1;1;0').added('1;1;1').deleted('1;1;2 (**)').unchanged('1;1;3')); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-2.txt')); + fp.addHunk(h => h.oldRow(1).unchanged('2;0;0').added('2;0;1').deleted('2;0;2').unchanged('2;0;3')); + fp.addHunk(h => h.oldRow(10).unchanged('2;1;0').added('2;1;1').deleted('2;2;2').unchanged('2;1;3')); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-3.txt')); + fp.addHunk(h => h.oldRow(1).unchanged('3;0;0').added('3;0;1 (**)').deleted('3;0;2 (**)').unchanged('3;0;3')); + fp.addHunk(h => h.oldRow(10).unchanged('3;1;0').added('3;1;1').deleted('3;2;2').unchanged('3;1;3')); + }) + .build() + .multiFilePatch; + + // Buffer rows corresponding to the rows marked with (**) above + rowSet = new Set([9, 14, 25, 26]); + }); + + 
it('generates a stage patch for arbitrary buffer rows', function() { + const stagePatch = multiFilePatch.getStagePatchForLines(rowSet); + + assert.strictEqual(stagePatch.getBuffer().getText(), dedent` + 1;0;0 + 1;0;1 (**) + 1;0;2 + 1;0;3 + 1;1;0 + 1;1;2 (**) + 1;1;3 + 3;0;0 + 3;0;1 (**) + 3;0;2 (**) + 3;0;3 + + `); + + assert.lengthOf(stagePatch.getFilePatches(), 2); + const [fp0, fp1] = stagePatch.getFilePatches(); + assert.strictEqual(fp0.getOldPath(), 'file-1.txt'); + assertInFilePatch(fp0, stagePatch.getBuffer()).hunks( + { + startRow: 0, endRow: 3, + header: '@@ -1,3 +1,4 @@', + regions: [ + {kind: 'unchanged', string: ' 1;0;0\n', range: [[0, 0], [0, 5]]}, + {kind: 'addition', string: '+1;0;1 (**)\n', range: [[1, 0], [1, 10]]}, + {kind: 'unchanged', string: ' 1;0;2\n 1;0;3\n', range: [[2, 0], [3, 5]]}, + ], + }, + { + startRow: 4, endRow: 6, + header: '@@ -10,3 +11,2 @@', + regions: [ + {kind: 'unchanged', string: ' 1;1;0\n', range: [[4, 0], [4, 5]]}, + {kind: 'deletion', string: '-1;1;2 (**)\n', range: [[5, 0], [5, 10]]}, + {kind: 'unchanged', string: ' 1;1;3\n', range: [[6, 0], [6, 5]]}, + ], + }, + ); + + assert.strictEqual(fp1.getOldPath(), 'file-3.txt'); + assertInFilePatch(fp1, stagePatch.getBuffer()).hunks( + { + startRow: 7, endRow: 10, + header: '@@ -1,3 +1,3 @@', + regions: [ + {kind: 'unchanged', string: ' 3;0;0\n', range: [[7, 0], [7, 5]]}, + {kind: 'addition', string: '+3;0;1 (**)\n', range: [[8, 0], [8, 10]]}, + {kind: 'deletion', string: '-3;0;2 (**)\n', range: [[9, 0], [9, 10]]}, + {kind: 'unchanged', string: ' 3;0;3\n', range: [[10, 0], [10, 5]]}, + ], + }, + ); + }); + + it('generates a stage patch from an arbitrary hunk', function() { + const hunk = multiFilePatch.getFilePatches()[0].getHunks()[1]; + const stagePatch = multiFilePatch.getStagePatchForHunk(hunk); + + assert.strictEqual(stagePatch.getBuffer().getText(), dedent` + 0;1;0 + 0;1;1 + 0;1;2 + 0;1;3 + + `); + assert.lengthOf(stagePatch.getFilePatches(), 1); + const [fp0] = 
stagePatch.getFilePatches(); + assert.strictEqual(fp0.getOldPath(), 'file-0.txt'); + assert.strictEqual(fp0.getNewPath(), 'file-0.txt'); + assertInFilePatch(fp0, stagePatch.getBuffer()).hunks( + { + startRow: 0, endRow: 3, + header: '@@ -10,3 +10,3 @@', + regions: [ + {kind: 'unchanged', string: ' 0;1;0\n', range: [[0, 0], [0, 5]]}, + {kind: 'addition', string: '+0;1;1\n', range: [[1, 0], [1, 5]]}, + {kind: 'deletion', string: '-0;1;2\n', range: [[2, 0], [2, 5]]}, + {kind: 'unchanged', string: ' 0;1;3\n', range: [[3, 0], [3, 5]]}, + ], + }, + ); + }); + + it('generates an unstage patch for arbitrary buffer rows', function() { + const unstagePatch = multiFilePatch.getUnstagePatchForLines(rowSet); + + assert.strictEqual(unstagePatch.getBuffer().getText(), dedent` + 1;0;0 + 1;0;1 (**) + 1;0;3 + 1;1;0 + 1;1;1 + 1;1;2 (**) + 1;1;3 + 3;0;0 + 3;0;1 (**) + 3;0;2 (**) + 3;0;3 + + `); + + assert.lengthOf(unstagePatch.getFilePatches(), 2); + const [fp0, fp1] = unstagePatch.getFilePatches(); + assert.strictEqual(fp0.getOldPath(), 'file-1.txt'); + assertInFilePatch(fp0, unstagePatch.getBuffer()).hunks( + { + startRow: 0, endRow: 2, + header: '@@ -1,3 +1,2 @@', + regions: [ + {kind: 'unchanged', string: ' 1;0;0\n', range: [[0, 0], [0, 5]]}, + {kind: 'deletion', string: '-1;0;1 (**)\n', range: [[1, 0], [1, 10]]}, + {kind: 'unchanged', string: ' 1;0;3\n', range: [[2, 0], [2, 5]]}, + ], + }, + { + startRow: 3, endRow: 6, + header: '@@ -10,3 +9,4 @@', + regions: [ + {kind: 'unchanged', string: ' 1;1;0\n 1;1;1\n', range: [[3, 0], [4, 5]]}, + {kind: 'addition', string: '+1;1;2 (**)\n', range: [[5, 0], [5, 10]]}, + {kind: 'unchanged', string: ' 1;1;3\n', range: [[6, 0], [6, 5]]}, + ], + }, + ); + + assert.strictEqual(fp1.getOldPath(), 'file-3.txt'); + assertInFilePatch(fp1, unstagePatch.getBuffer()).hunks( + { + startRow: 7, endRow: 10, + header: '@@ -1,3 +1,3 @@', + regions: [ + {kind: 'unchanged', string: ' 3;0;0\n', range: [[7, 0], [7, 5]]}, + {kind: 'deletion', string: '-3;0;1 
(**)\n', range: [[8, 0], [8, 10]]}, + {kind: 'addition', string: '+3;0;2 (**)\n', range: [[9, 0], [9, 10]]}, + {kind: 'unchanged', string: ' 3;0;3\n', range: [[10, 0], [10, 5]]}, + ], + }, + ); + }); + + it('generates an unstage patch for an arbitrary hunk', function() { + const hunk = multiFilePatch.getFilePatches()[1].getHunks()[0]; + const unstagePatch = multiFilePatch.getUnstagePatchForHunk(hunk); + + assert.strictEqual(unstagePatch.getBuffer().getText(), dedent` + 1;0;0 + 1;0;1 (**) + 1;0;2 + 1;0;3 + + `); + assert.lengthOf(unstagePatch.getFilePatches(), 1); + const [fp0] = unstagePatch.getFilePatches(); + assert.strictEqual(fp0.getOldPath(), 'file-1.txt'); + assert.strictEqual(fp0.getNewPath(), 'file-1.txt'); + assertInFilePatch(fp0, unstagePatch.getBuffer()).hunks( + { + startRow: 0, endRow: 3, + header: '@@ -1,3 +1,3 @@', + regions: [ + {kind: 'unchanged', string: ' 1;0;0\n', range: [[0, 0], [0, 5]]}, + {kind: 'deletion', string: '-1;0;1 (**)\n', range: [[1, 0], [1, 10]]}, + {kind: 'addition', string: '+1;0;2\n', range: [[2, 0], [2, 5]]}, + {kind: 'unchanged', string: ' 1;0;3\n', range: [[3, 0], [3, 5]]}, + ], + }, + ); + }); + }); + + describe('next selection range derivation', function() { + it('selects the origin if the new patch is empty', function() { + const {multiFilePatch: lastMultiPatch} = multiFilePatchBuilder().addFilePatch().build(); + const {multiFilePatch: nextMultiPatch} = multiFilePatchBuilder().build(); + + const nextSelectionRange = nextMultiPatch.getNextSelectionRange(lastMultiPatch, new Set()); + assert.deepEqual(nextSelectionRange.serialize(), [[0, 0], [0, 0]]); + }); + + it('selects the first change row if there was no prior selection', function() { + const {multiFilePatch: lastMultiPatch} = multiFilePatchBuilder().build(); + const {multiFilePatch: nextMultiPatch} = multiFilePatchBuilder().addFilePatch().build(); + const nextSelectionRange = nextMultiPatch.getNextSelectionRange(lastMultiPatch, new Set()); + 
assert.deepEqual(nextSelectionRange.serialize(), [[1, 0], [1, Infinity]]); + }); + + it('preserves the numeric index of the highest selected change row', function() { + const {multiFilePatch: lastMultiPatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.').added('0', '1', 'x *').unchanged('.')); + fp.addHunk(h => h.unchanged('.').deleted('2').added('3').unchanged('.')); + }) + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.').deleted('4', '5 *', '6').unchanged('.')); + fp.addHunk(h => h.unchanged('.').added('7').unchanged('.')); + }) + .build(); + + const {multiFilePatch: nextMultiPatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.').added('0', '1').unchanged('x', '.')); + fp.addHunk(h => h.unchanged('.').deleted('2').added('3').unchanged('.')); + }) + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.').deleted('4', '6 *').unchanged('.')); + fp.addHunk(h => h.unchanged('.').added('7').unchanged('.')); + }) + .build(); + + const nextSelectionRange = nextMultiPatch.getNextSelectionRange(lastMultiPatch, new Set([3, 11])); + assert.deepEqual(nextSelectionRange.serialize(), [[11, 0], [11, Infinity]]); + }); + + describe('when the bottom-most changed row is selected', function() { + it('selects the bottom-most changed row of the new patch', function() { + const {multiFilePatch: lastMultiPatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.').added('0', '1', 'x').unchanged('.')); + fp.addHunk(h => h.unchanged('.').deleted('2').added('3').unchanged('.')); + }) + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.').deleted('4', '5', '6').unchanged('.')); + fp.addHunk(h => h.unchanged('.').added('7', '8 *').unchanged('.')); + }) + .build(); + + const {multiFilePatch: nextMultiPatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.').added('0', '1', 'x').unchanged('.')); + fp.addHunk(h => 
h.unchanged('.').deleted('2').added('3').unchanged('.')); + }) + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.').deleted('4', '5', '6').unchanged('.')); + fp.addHunk(h => h.unchanged('.').added('7').unchanged('.')); + }) + .build(); + + const nextSelectionRange = nextMultiPatch.getNextSelectionRange(lastMultiPatch, new Set([16])); + assert.deepEqual(nextSelectionRange.serialize(), [[15, 0], [15, Infinity]]); + }); + }); + + it('skips hunks that were completely selected', function() { + const {multiFilePatch: lastMultiPatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.').added('0').unchanged('.')); + fp.addHunk(h => h.unchanged('.').added('x *', 'x *').unchanged('.')); + }) + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.').deleted('x *').unchanged('.')); + }) + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.').added('x *', '1').deleted('2').unchanged('.')); + fp.addHunk(h => h.unchanged('.').deleted('x *').unchanged('.')); + fp.addHunk(h => h.unchanged('.', '.').deleted('4', '5 *', '6').unchanged('.')); + fp.addHunk(h => h.unchanged('.').deleted('7', '8').unchanged('.', '.')); + }) + .build(); + + const {multiFilePatch: nextMultiPatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.').added('0').unchanged('.')); + }) + .addFilePatch(fp => { + fp.addHunk(h => h.unchanged('.', 'x').added('1').deleted('2').unchanged('.')); + fp.addHunk(h => h.unchanged('.', '.').deleted('4', '6 +').unchanged('.')); + fp.addHunk(h => h.unchanged('.').deleted('7', '8').unchanged('.', '.')); + }) + .build(); + + const nextSelectionRange = nextMultiPatch.getNextSelectionRange( + lastMultiPatch, + new Set([4, 5, 8, 11, 16, 21]), + ); + assert.deepEqual(nextSelectionRange.serialize(), [[11, 0], [11, Infinity]]); + }); + }); + + describe('file-patch spanning selection detection', function() { + let multiFilePatch; + + beforeEach(function() { + multiFilePatch = multiFilePatchBuilder() + 
.addFilePatch(fp => { + fp.setOldFile(f => f.path('file-0')); + fp.addHunk(h => h.unchanged('0').added('1').deleted('2', '3').unchanged('4')); + fp.addHunk(h => h.unchanged('5').added('6').unchanged('7')); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-1')); + fp.addHunk(h => h.unchanged('8').deleted('9', '10').unchanged('11')); + }) + .build() + .multiFilePatch; + }); + + it('with buffer positions belonging to a single patch', function() { + assert.isFalse(multiFilePatch.spansMultipleFiles([1, 5])); + }); + + it('with buffer positions belonging to multiple patches', function() { + assert.isTrue(multiFilePatch.spansMultipleFiles([6, 10])); + }); + }); +}); diff --git a/test/models/patch/patch.test.js b/test/models/patch/patch.test.js index 0ee6dccbef0..b5989ab3fe9 100644 --- a/test/models/patch/patch.test.js +++ b/test/models/patch/patch.test.js @@ -9,23 +9,11 @@ describe('Patch', function() { it('has some standard accessors', function() { const buffer = new TextBuffer({text: 'bufferText'}); const layers = buildLayers(buffer); - const p = new Patch({status: 'modified', hunks: [], buffer, layers}); + const marker = markRange(layers.patch, 0, Infinity); + const p = new Patch({status: 'modified', hunks: [], marker}); assert.strictEqual(p.getStatus(), 'modified'); assert.deepEqual(p.getHunks(), []); - assert.strictEqual(p.getBuffer().getText(), 'bufferText'); assert.isTrue(p.isPresent()); - - assert.strictEqual(p.getUnchangedLayer().getMarkerCount(), 0); - assert.strictEqual(p.getAdditionLayer().getMarkerCount(), 0); - assert.strictEqual(p.getDeletionLayer().getMarkerCount(), 0); - assert.strictEqual(p.getNoNewlineLayer().getMarkerCount(), 0); - }); - - it('computes the byte size of the total patch data', function() { - const buffer = new TextBuffer({text: '\u00bd + \u00bc = \u00be'}); - const layers = buildLayers(buffer); - const p = new Patch({status: 'modified', hunks: [], buffer, layers}); - assert.strictEqual(p.getByteSize(), 12); }); it('computes 
the total changed line count', function() { @@ -58,7 +46,9 @@ describe('Patch', function() { ], }), ]; - const p = new Patch({status: 'modified', hunks, buffer, layers}); + const marker = markRange(layers.patch, 0, Infinity); + + const p = new Patch({status: 'modified', hunks, marker}); assert.strictEqual(p.getChangedLineCount(), 10); }); @@ -90,74 +80,40 @@ describe('Patch', function() { assert.strictEqual(p1.getMaxLineNumberWidth(), 0); }); - it('accesses the Hunk at a buffer row', function() { - const buffer = buildBuffer(8); - const layers = buildLayers(buffer); - const hunk0 = new Hunk({ - oldStartRow: 1, oldRowCount: 4, newStartRow: 1, newRowCount: 4, - marker: markRange(layers.hunk, 0, 3), - regions: [ - new Unchanged(markRange(layers.unchanged, 0)), - new Addition(markRange(layers.addition, 1)), - new Deletion(markRange(layers.deletion, 2)), - new Unchanged(markRange(layers.unchanged, 3)), - ], - }); - const hunk1 = new Hunk({ - oldStartRow: 10, oldRowCount: 4, newStartRow: 10, newRowCount: 4, - marker: markRange(layers.hunk, 4, 7), - regions: [ - new Unchanged(markRange(layers.unchanged, 4)), - new Deletion(markRange(layers.deletion, 5)), - new Addition(markRange(layers.addition, 6)), - new Unchanged(markRange(layers.unchanged, 7)), - ], - }); - const hunks = [hunk0, hunk1]; - const patch = new Patch({status: 'modified', hunks, buffer, layers}); - - assert.strictEqual(patch.getHunkAt(0), hunk0); - assert.strictEqual(patch.getHunkAt(1), hunk0); - assert.strictEqual(patch.getHunkAt(2), hunk0); - assert.strictEqual(patch.getHunkAt(3), hunk0); - assert.strictEqual(patch.getHunkAt(4), hunk1); - assert.strictEqual(patch.getHunkAt(5), hunk1); - assert.strictEqual(patch.getHunkAt(6), hunk1); - assert.strictEqual(patch.getHunkAt(7), hunk1); - assert.isUndefined(patch.getHunkAt(10)); - }); - it('clones itself with optionally overridden properties', function() { const buffer = new TextBuffer({text: 'bufferText'}); const layers = buildLayers(buffer); - const original 
 = new Patch({status: 'modified', hunks: [], buffer, layers}); + const marker = markRange(layers.patch, 0, Infinity); + + const original = new Patch({status: 'modified', hunks: [], marker}); const dup0 = original.clone(); assert.notStrictEqual(dup0, original); assert.strictEqual(dup0.getStatus(), 'modified'); assert.deepEqual(dup0.getHunks(), []); - assert.strictEqual(dup0.getBuffer().getText(), 'bufferText'); + assert.strictEqual(dup0.getMarker(), marker); const dup1 = original.clone({status: 'added'}); assert.notStrictEqual(dup1, original); assert.strictEqual(dup1.getStatus(), 'added'); assert.deepEqual(dup1.getHunks(), []); - assert.strictEqual(dup1.getBuffer().getText(), 'bufferText'); + assert.strictEqual(dup1.getMarker(), marker); const hunks = [new Hunk({regions: []})]; const dup2 = original.clone({hunks}); assert.notStrictEqual(dup2, original); assert.strictEqual(dup2.getStatus(), 'modified'); assert.deepEqual(dup2.getHunks(), hunks); - assert.strictEqual(dup2.getBuffer().getText(), 'bufferText'); + assert.strictEqual(dup2.getMarker(), marker); const nBuffer = new TextBuffer({text: 'changed'}); const nLayers = buildLayers(nBuffer); - const dup3 = original.clone({buffer: nBuffer, layers: nLayers}); + const nMarker = markRange(nLayers.patch, 0, Infinity); + const dup3 = original.clone({marker: nMarker}); assert.notStrictEqual(dup3, original); assert.strictEqual(dup3.getStatus(), 'modified'); assert.deepEqual(dup3.getHunks(), []); - assert.strictEqual(dup3.getBuffer().getText(), 'changed'); + assert.strictEqual(dup3.getMarker(), nMarker); }); it('clones a nullPatch as a nullPatch', function() { @@ -172,32 +128,55 @@ describe('Patch', function() { assert.notStrictEqual(dup0, nullPatch); assert.strictEqual(dup0.getStatus(), 'added'); assert.deepEqual(dup0.getHunks(), []); - assert.strictEqual(dup0.getBuffer().getText(), ''); + assert.deepEqual(dup0.getMarker().getRange().serialize(), [[0, 0], [0, 0]]); const hunks = [new Hunk({regions: []})]; const dup1 = 
nullPatch.clone({hunks}); assert.notStrictEqual(dup1, nullPatch); assert.isNull(dup1.getStatus()); assert.deepEqual(dup1.getHunks(), hunks); - assert.strictEqual(dup1.getBuffer().getText(), ''); + assert.deepEqual(dup1.getMarker().getRange().serialize(), [[0, 0], [0, 0]]); const nBuffer = new TextBuffer({text: 'changed'}); const nLayers = buildLayers(nBuffer); - const dup2 = nullPatch.clone({buffer: nBuffer, layers: nLayers}); + const nMarker = markRange(nLayers.patch, 0, Infinity); + const dup2 = nullPatch.clone({marker: nMarker}); assert.notStrictEqual(dup2, nullPatch); assert.isNull(dup2.getStatus()); assert.deepEqual(dup2.getHunks(), []); - assert.strictEqual(dup2.getBuffer().getText(), 'changed'); + assert.strictEqual(dup2.getMarker(), nMarker); + }); + + it('returns an empty Range at the beginning of its Marker', function() { + const {patch} = buildPatchFixture(); + assert.deepEqual(patch.getStartRange().serialize(), [[0, 0], [0, 0]]); + }); + + it('determines whether or not a buffer row belongs to this patch', function() { + const {patch} = buildPatchFixture(); + + assert.isTrue(patch.containsRow(0)); + assert.isTrue(patch.containsRow(5)); + assert.isTrue(patch.containsRow(26)); + assert.isFalse(patch.containsRow(27)); }); describe('stage patch generation', function() { + let stageLayeredBuffer; + + beforeEach(function() { + const stageBuffer = new TextBuffer(); + const stageLayers = buildLayers(stageBuffer); + stageLayeredBuffer = {buffer: stageBuffer, layers: stageLayers}; + }); + it('creates a patch that applies selected lines from only the first hunk', function() { - const patch = buildPatchFixture(); - const stagePatch = patch.getStagePatchForLines(new Set([2, 3, 4, 5])); + const {patch, buffer: originalBuffer} = buildPatchFixture(); + const stagePatch = patch.buildStagePatchForLines(originalBuffer, stageLayeredBuffer, new Set([2, 3, 4, 5])); // buffer rows: 0 1 2 3 4 5 6 const expectedBufferText = '0000\n0001\n0002\n0003\n0004\n0005\n0006\n'; - 
assert.strictEqual(stagePatch.getBuffer().getText(), expectedBufferText); - assertInPatch(stagePatch).hunks( + assert.strictEqual(stageLayeredBuffer.buffer.getText(), expectedBufferText); + assertInPatch(stagePatch, stageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 6, @@ -213,12 +192,12 @@ describe('Patch', function() { }); it('creates a patch that applies selected lines from a single non-first hunk', function() { - const patch = buildPatchFixture(); - const stagePatch = patch.getStagePatchForLines(new Set([8, 13, 14, 16])); + const {patch, buffer: originalBuffer} = buildPatchFixture(); + const stagePatch = patch.buildStagePatchForLines(originalBuffer, stageLayeredBuffer, new Set([8, 13, 14, 16])); // buffer rows: 0 1 2 3 4 5 6 7 8 9 const expectedBufferText = '0007\n0008\n0010\n0011\n0012\n0013\n0014\n0015\n0016\n0018\n'; - assert.strictEqual(stagePatch.getBuffer().getText(), expectedBufferText); - assertInPatch(stagePatch).hunks( + assert.strictEqual(stageLayeredBuffer.buffer.getText(), expectedBufferText); + assertInPatch(stagePatch, stageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 9, @@ -237,8 +216,8 @@ describe('Patch', function() { }); it('creates a patch that applies selected lines from several hunks', function() { - const patch = buildPatchFixture(); - const stagePatch = patch.getStagePatchForLines(new Set([1, 5, 15, 16, 17, 25])); + const {patch, buffer: originalBuffer} = buildPatchFixture(); + const stagePatch = patch.buildStagePatchForLines(originalBuffer, stageLayeredBuffer, new Set([1, 5, 15, 16, 17, 25])); const expectedBufferText = // buffer rows // 0 1 2 3 4 @@ -247,8 +226,8 @@ describe('Patch', function() { '0007\n0010\n0011\n0012\n0013\n0014\n0015\n0016\n0017\n0018\n' + // 15 16 17 '0024\n0025\n No newline at end of file\n'; - assert.strictEqual(stagePatch.getBuffer().getText(), expectedBufferText); - assertInPatch(stagePatch).hunks( + assert.strictEqual(stageLayeredBuffer.buffer.getText(), expectedBufferText); + 
assertInPatch(stagePatch, stageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 4, @@ -286,15 +265,15 @@ describe('Patch', function() { }); it('marks ranges for each change region on the correct marker layer', function() { - const patch = buildPatchFixture(); - const stagePatch = patch.getStagePatchForLines(new Set([1, 5, 15, 16, 17, 25])); + const {patch, buffer: originalBuffer} = buildPatchFixture(); + patch.buildStagePatchForLines(originalBuffer, stageLayeredBuffer, new Set([1, 5, 15, 16, 17, 25])); const layerRanges = [ - ['hunk', stagePatch.getHunkLayer()], - ['unchanged', stagePatch.getUnchangedLayer()], - ['addition', stagePatch.getAdditionLayer()], - ['deletion', stagePatch.getDeletionLayer()], - ['noNewline', stagePatch.getNoNewlineLayer()], + ['hunk', stageLayeredBuffer.layers.hunk], + ['unchanged', stageLayeredBuffer.layers.unchanged], + ['addition', stageLayeredBuffer.layers.addition], + ['deletion', stageLayeredBuffer.layers.deletion], + ['noNewline', stageLayeredBuffer.layers.noNewline], ].reduce((obj, [key, layer]) => { obj[key] = layer.getMarkers().map(marker => marker.getRange().serialize()); return obj; @@ -342,12 +321,13 @@ describe('Patch', function() { ], }), ]; + const marker = markRange(layers.patch, 0, 5); - const patch = new Patch({status: 'deleted', hunks, buffer, layers}); + const patch = new Patch({status: 'deleted', hunks, marker}); - const stagedPatch = patch.getStagePatchForLines(new Set([1, 3, 4])); + const stagedPatch = patch.buildStagePatchForLines(buffer, stageLayeredBuffer, new Set([1, 3, 4])); assert.strictEqual(stagedPatch.getStatus(), 'modified'); - assertInPatch(stagedPatch).hunks( + assertInPatch(stagedPatch, stageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 5, @@ -375,28 +355,37 @@ describe('Patch', function() { ], }), ]; - const patch = new Patch({status: 'deleted', hunks, buffer, layers}); + const marker = markRange(layers.patch, 0, 2); + const patch = new Patch({status: 'deleted', hunks, marker}); - const 
stagePatch0 = patch.getStagePatchForLines(new Set([0, 1, 2])); + const stagePatch0 = patch.buildStagePatchForLines(buffer, stageLayeredBuffer, new Set([0, 1, 2])); assert.strictEqual(stagePatch0.getStatus(), 'deleted'); }); it('returns a nullPatch as a nullPatch', function() { const nullPatch = Patch.createNull(); - assert.strictEqual(nullPatch.getStagePatchForLines(new Set([1, 2, 3])), nullPatch); + assert.strictEqual(nullPatch.buildStagePatchForLines(new Set([1, 2, 3])), nullPatch); }); }); describe('unstage patch generation', function() { + let unstageLayeredBuffer; + + beforeEach(function() { + const unstageBuffer = new TextBuffer(); + const unstageLayers = buildLayers(unstageBuffer); + unstageLayeredBuffer = {buffer: unstageBuffer, layers: unstageLayers}; + }); + it('creates a patch that updates the index to unapply selected lines from a single hunk', function() { - const patch = buildPatchFixture(); - const unstagePatch = patch.getUnstagePatchForLines(new Set([8, 12, 13])); + const {patch, buffer: originalBuffer} = buildPatchFixture(); + const unstagePatch = patch.buildUnstagePatchForLines(originalBuffer, unstageLayeredBuffer, new Set([8, 12, 13])); assert.strictEqual( - unstagePatch.getBuffer().getText(), + unstageLayeredBuffer.buffer.getText(), // 0 1 2 3 4 5 6 7 8 '0007\n0008\n0009\n0010\n0011\n0012\n0013\n0017\n0018\n', ); - assertInPatch(unstagePatch).hunks( + assertInPatch(unstagePatch, unstageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 8, @@ -413,10 +402,10 @@ describe('Patch', function() { }); it('creates a patch that updates the index to unapply lines from several hunks', function() { - const patch = buildPatchFixture(); - const unstagePatch = patch.getUnstagePatchForLines(new Set([1, 4, 5, 16, 17, 20, 25])); + const {patch, buffer: originalBuffer} = buildPatchFixture(); + const unstagePatch = patch.buildUnstagePatchForLines(originalBuffer, unstageLayeredBuffer, new Set([1, 4, 5, 16, 17, 20, 25])); assert.strictEqual( - 
unstagePatch.getBuffer().getText(), + unstageLayeredBuffer.buffer.getText(), // 0 1 2 3 4 5 '0000\n0001\n0003\n0004\n0005\n0006\n' + // 6 7 8 9 10 11 12 13 @@ -426,7 +415,7 @@ describe('Patch', function() { // 17 18 19 '0024\n0025\n No newline at end of file\n', ); - assertInPatch(unstagePatch).hunks( + assertInPatch(unstagePatch, unstageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 5, @@ -474,15 +463,14 @@ describe('Patch', function() { }); it('marks ranges for each change region on the correct marker layer', function() { - const patch = buildPatchFixture(); - const unstagePatch = patch.getUnstagePatchForLines(new Set([1, 4, 5, 16, 17, 20, 25])); - + const {patch, buffer: originalBuffer} = buildPatchFixture(); + patch.buildUnstagePatchForLines(originalBuffer, unstageLayeredBuffer, new Set([1, 4, 5, 16, 17, 20, 25])); const layerRanges = [ - ['hunk', unstagePatch.getHunkLayer()], - ['unchanged', unstagePatch.getUnchangedLayer()], - ['addition', unstagePatch.getAdditionLayer()], - ['deletion', unstagePatch.getDeletionLayer()], - ['noNewline', unstagePatch.getNoNewlineLayer()], + ['hunk', unstageLayeredBuffer.layers.hunk], + ['unchanged', unstageLayeredBuffer.layers.unchanged], + ['addition', unstageLayeredBuffer.layers.addition], + ['deletion', unstageLayeredBuffer.layers.deletion], + ['noNewline', unstageLayeredBuffer.layers.noNewline], ].reduce((obj, [key, layer]) => { obj[key] = layer.getMarkers().map(marker => marker.getRange().serialize()); return obj; @@ -533,11 +521,12 @@ describe('Patch', function() { ], }), ]; - const patch = new Patch({status: 'added', hunks, buffer, layers}); - const unstagePatch = patch.getUnstagePatchForLines(new Set([1, 2])); + const marker = markRange(layers.patch, 0, 2); + const patch = new Patch({status: 'added', hunks, marker}); + const unstagePatch = patch.buildUnstagePatchForLines(buffer, unstageLayeredBuffer, new Set([1, 2])); assert.strictEqual(unstagePatch.getStatus(), 'modified'); - 
assert.strictEqual(unstagePatch.getBuffer().getText(), '0000\n0001\n0002\n'); - assertInPatch(unstagePatch).hunks( + assert.strictEqual(unstageLayeredBuffer.buffer.getText(), '0000\n0001\n0002\n'); + assertInPatch(unstagePatch, unstageLayeredBuffer.buffer).hunks( { startRow: 0, endRow: 2, @@ -565,22 +554,45 @@ describe('Patch', function() { ], }), ]; - const patch = new Patch({status: 'added', hunks, buffer, layers}); + const marker = markRange(layers.patch, 0, 2); + const patch = new Patch({status: 'added', hunks, marker}); - const unstagePatch = patch.getUnstagePatchForLines(new Set([0, 1, 2])); + const unstagePatch = patch.buildUnstagePatchForLines(buffer, unstageLayeredBuffer, new Set([0, 1, 2])); assert.strictEqual(unstagePatch.getStatus(), 'deleted'); }); + it('returns an addition when unstaging a deletion', function() { + const buffer = new TextBuffer({text: '0000\n0001\n0002\n'}); + const layers = buildLayers(buffer); + const hunks = [ + new Hunk({ + oldStartRow: 1, + oldRowCount: 0, + newStartRow: 1, + newRowCount: 3, + marker: markRange(layers.hunk, 0, 2), + regions: [ + new Addition(markRange(layers.addition, 0, 2)), + ], + }), + ]; + const marker = markRange(layers.patch, 0, 2); + const patch = new Patch({status: 'deleted', hunks, marker}); + + const unstagePatch = patch.buildUnstagePatchForLines(buffer, unstageLayeredBuffer, new Set([0, 1, 2])); + assert.strictEqual(unstagePatch.getStatus(), 'added'); + }); + it('returns a nullPatch as a nullPatch', function() { const nullPatch = Patch.createNull(); - assert.strictEqual(nullPatch.getUnstagePatchForLines(new Set([1, 2, 3])), nullPatch); + assert.strictEqual(nullPatch.buildUnstagePatchForLines(new Set([1, 2, 3])), nullPatch); }); }); describe('getFirstChangeRange', function() { it('accesses the range of the first change from the first hunk', function() { - const patch = buildPatchFixture(); - assert.deepEqual(patch.getFirstChangeRange(), [[1, 0], [1, Infinity]]); + const {patch} = buildPatchFixture(); + 
assert.deepEqual(patch.getFirstChangeRange().serialize(), [[1, 0], [1, Infinity]]); }); it('returns the origin if the first hunk is empty', function() { @@ -593,174 +605,17 @@ describe('Patch', function() { regions: [], }), ]; - const patch = new Patch({status: 'modified', hunks, buffer, layers}); - assert.deepEqual(patch.getFirstChangeRange(), [[0, 0], [0, 0]]); + const marker = markRange(layers.patch, 0); + const patch = new Patch({status: 'modified', hunks, marker}); + assert.deepEqual(patch.getFirstChangeRange().serialize(), [[0, 0], [0, 0]]); }); it('returns the origin if the patch is empty', function() { const buffer = new TextBuffer({text: ''}); const layers = buildLayers(buffer); - const patch = new Patch({status: 'modified', hunks: [], buffer, layers}); - assert.deepEqual(patch.getFirstChangeRange(), [[0, 0], [0, 0]]); - }); - }); - - describe('next selection range derivation', function() { - it('selects the first change region after the highest buffer row', function() { - const lastPatch = buildPatchFixture(); - // Selected: - // deletions (1-2) and partial addition (4 from 3-5) from hunk 0 - // one deletion row (13 from 12-16) from the middle of hunk 1; - // nothing in hunks 2 or 3 - const lastSelectedRows = new Set([1, 2, 4, 5, 13]); - - const nBuffer = new TextBuffer({text: - // 0 1 2 3 4 - '0000\n0003\n0004\n0005\n0006\n' + - // 5 6 7 8 9 10 11 12 13 14 15 - '0007\n0008\n0009\n0010\n0011\n0012\n0014\n0015\n0016\n0017\n0018\n' + - // 16 17 18 19 20 - '0019\n0020\n0021\n0022\n0023\n' + - // 21 22 23 - '0024\n0025\n No newline at end of file\n', - }); - const nLayers = buildLayers(nBuffer); - const nHunks = [ - new Hunk({ - oldStartRow: 3, oldRowCount: 3, newStartRow: 3, newRowCount: 5, // next row drift = +2 - marker: markRange(nLayers.hunk, 0, 4), - regions: [ - new Unchanged(markRange(nLayers.unchanged, 0)), // 0 - new Addition(markRange(nLayers.addition, 1)), // + 1 - new Unchanged(markRange(nLayers.unchanged, 2)), // 2 - new 
Addition(markRange(nLayers.addition, 3)), // + 3 - new Unchanged(markRange(nLayers.unchanged, 4)), // 4 - ], - }), - new Hunk({ - oldStartRow: 12, oldRowCount: 9, newStartRow: 14, newRowCount: 7, // next row drift = +2 -2 = 0 - marker: markRange(nLayers.hunk, 5, 15), - regions: [ - new Unchanged(markRange(nLayers.unchanged, 5)), // 5 - new Addition(markRange(nLayers.addition, 6)), // +6 - new Unchanged(markRange(nLayers.unchanged, 7, 9)), // 7 8 9 - new Deletion(markRange(nLayers.deletion, 10, 13)), // -10 -11 -12 -13 - new Addition(markRange(nLayers.addition, 14)), // +14 - new Unchanged(markRange(nLayers.unchanged, 15)), // 15 - ], - }), - new Hunk({ - oldStartRow: 26, oldRowCount: 4, newStartRow: 26, newRowCount: 3, // next row drift = 0 -1 = -1 - marker: markRange(nLayers.hunk, 16, 20), - regions: [ - new Unchanged(markRange(nLayers.unchanged, 16)), // 16 - new Addition(markRange(nLayers.addition, 17)), // +17 - new Deletion(markRange(nLayers.deletion, 18, 19)), // -18 -19 - new Unchanged(markRange(nLayers.unchanged, 20)), // 20 - ], - }), - new Hunk({ - oldStartRow: 32, oldRowCount: 1, newStartRow: 31, newRowCount: 2, - marker: markRange(nLayers.hunk, 22, 24), - regions: [ - new Unchanged(markRange(nLayers.unchanged, 22)), // 22 - new Addition(markRange(nLayers.addition, 23)), // +23 - new NoNewline(markRange(nLayers.noNewline, 24)), - ], - }), - ]; - const nextPatch = new Patch({status: 'modified', hunks: nHunks, buffer: nBuffer, layers: nLayers}); - - const nextRange = nextPatch.getNextSelectionRange(lastPatch, lastSelectedRows); - // Original buffer row 14 = the next changed row = new buffer row 11 - assert.deepEqual(nextRange, [[11, 0], [11, Infinity]]); - }); - - it('offsets the chosen selection index by hunks that were completely selected', function() { - const buffer = buildBuffer(11); - const layers = buildLayers(buffer); - const lastPatch = new Patch({ - status: 'modified', - hunks: [ - new Hunk({ - oldStartRow: 1, oldRowCount: 3, newStartRow: 1, 
newRowCount: 3, - marker: markRange(layers.hunk, 0, 5), - regions: [ - new Unchanged(markRange(layers.unchanged, 0)), - new Addition(markRange(layers.addition, 1, 2)), - new Deletion(markRange(layers.deletion, 3, 4)), - new Unchanged(markRange(layers.unchanged, 5)), - ], - }), - new Hunk({ - oldStartRow: 5, oldRowCount: 4, newStartRow: 5, newRowCount: 4, - marker: markRange(layers.hunk, 6, 11), - regions: [ - new Unchanged(markRange(layers.unchanged, 6)), - new Addition(markRange(layers.addition, 7, 8)), - new Deletion(markRange(layers.deletion, 9, 10)), - new Unchanged(markRange(layers.unchanged, 11)), - ], - }), - ], - buffer, - layers, - }); - // Select: - // * all changes from hunk 0 - // * partial addition (8 of 7-8) from hunk 1 - const lastSelectedRows = new Set([1, 2, 3, 4, 8]); - - const nextBuffer = new TextBuffer({text: '0006\n0007\n0008\n0009\n0010\n0011\n'}); - const nextLayers = buildLayers(nextBuffer); - const nextPatch = new Patch({ - status: 'modified', - hunks: [ - new Hunk({ - oldStartRow: 5, oldRowCount: 4, newStartRow: 5, newRowCount: 4, - marker: markRange(nextLayers.hunk, 0, 5), - regions: [ - new Unchanged(markRange(nextLayers.unchanged, 0)), - new Addition(markRange(nextLayers.addition, 1)), - new Deletion(markRange(nextLayers.deletion, 3, 4)), - new Unchanged(markRange(nextLayers.unchanged, 5)), - ], - }), - ], - buffer: nextBuffer, - layers: nextLayers, - }); - - const range = nextPatch.getNextSelectionRange(lastPatch, lastSelectedRows); - assert.deepEqual(range, [[3, 0], [3, Infinity]]); - }); - - it('selects the first row of the first change of the patch if no rows were selected before', function() { - const lastPatch = buildPatchFixture(); - const lastSelectedRows = new Set(); - - const buffer = lastPatch.getBuffer(); - const layers = buildLayers(buffer); - const nextPatch = new Patch({ - status: 'modified', - hunks: [ - new Hunk({ - oldStartRow: 1, oldRowCount: 3, newStartRow: 1, newRowCount: 4, - marker: markRange(layers.hunk, 0, 4), 
- regions: [ - new Unchanged(markRange(layers.unchanged, 0)), - new Addition(markRange(layers.addition, 1, 2)), - new Deletion(markRange(layers.deletion, 3)), - new Unchanged(markRange(layers.unchanged, 4)), - ], - }), - ], - buffer, - layers, - }); - - const range = nextPatch.getNextSelectionRange(lastPatch, lastSelectedRows); - assert.deepEqual(range, [[1, 0], [1, Infinity]]); + const marker = markRange(layers.patch, 0); + const patch = new Patch({status: 'modified', hunks: [], marker}); + assert.deepEqual(patch.getFirstChangeRange().serialize(), [[0, 0], [0, 0]]); }); }); @@ -789,10 +644,11 @@ describe('Patch', function() { new Unchanged(markRange(layers.unchanged, 9)), ], }); + const marker = markRange(layers.patch, 0, 9); - const p = new Patch({status: 'modified', hunks: [hunk0, hunk1], buffer, layers}); + const p = new Patch({status: 'modified', hunks: [hunk0, hunk1], marker}); - assert.strictEqual(p.toString(), [ + assert.strictEqual(p.toStringIn(buffer), [ '@@ -0,2 +0,3 @@\n', ' 0000\n', '+0001\n', @@ -820,9 +676,10 @@ describe('Patch', function() { new Unchanged(markRange(layers.unchanged, 5)), ], }); + const marker = markRange(layers.patch, 0, 5); - const p = new Patch({status: 'modified', hunks: [hunk], buffer, layers}); - assert.strictEqual(p.toString(), [ + const p = new Patch({status: 'modified', hunks: [hunk], marker}); + assert.strictEqual(p.toStringIn(buffer), [ '@@ -1,5 +1,5 @@\n', ' \n', ' \n', @@ -837,97 +694,15 @@ describe('Patch', function() { it('has a stubbed nullPatch counterpart', function() { const nullPatch = Patch.createNull(); assert.isNull(nullPatch.getStatus()); + assert.deepEqual(nullPatch.getMarker().getRange().serialize(), [[0, 0], [0, 0]]); + assert.deepEqual(nullPatch.getStartRange().serialize(), [[0, 0], [0, 0]]); assert.deepEqual(nullPatch.getHunks(), []); - assert.strictEqual(nullPatch.getBuffer().getText(), ''); - assert.strictEqual(nullPatch.getByteSize(), 0); - assert.isFalse(nullPatch.isPresent()); - 
assert.strictEqual(nullPatch.toString(), ''); assert.strictEqual(nullPatch.getChangedLineCount(), 0); + assert.isFalse(nullPatch.containsRow(0)); assert.strictEqual(nullPatch.getMaxLineNumberWidth(), 0); - assert.isUndefined(nullPatch.getHunkAt(0)); - assert.deepEqual(nullPatch.getFirstChangeRange(), [[0, 0], [0, 0]]); - assert.deepEqual(nullPatch.getNextSelectionRange(), [[0, 0], [0, 0]]); - }); - - it('adopts a buffer from a previous patch', function() { - const patch0 = buildPatchFixture(); - const buffer0 = patch0.getBuffer(); - const hunkLayer0 = patch0.getHunkLayer(); - const unchangedLayer0 = patch0.getUnchangedLayer(); - const additionLayer0 = patch0.getAdditionLayer(); - const deletionLayer0 = patch0.getDeletionLayer(); - const noNewlineLayer0 = patch0.getNoNewlineLayer(); - - const buffer1 = new TextBuffer({text: '0000\n0001\n0002\n0003\n0004\n No newline at end of file'}); - const layers1 = buildLayers(buffer1); - const hunks1 = [ - new Hunk({ - oldStartRow: 1, oldRowCount: 2, newStartRow: 1, newRowCount: 3, - sectionHeading: '0', - marker: markRange(layers1.hunk, 0, 2), - regions: [ - new Unchanged(markRange(layers1.unchanged, 0)), - new Addition(markRange(layers1.addition, 1)), - new Unchanged(markRange(layers1.unchanged, 2)), - ], - }), - new Hunk({ - oldStartRow: 5, oldRowCount: 2, newStartRow: 1, newRowCount: 3, - sectionHeading: '0', - marker: markRange(layers1.hunk, 3, 5), - regions: [ - new Unchanged(markRange(layers1.unchanged, 3)), - new Deletion(markRange(layers1.deletion, 4)), - new NoNewline(markRange(layers1.noNewline, 5)), - ], - }), - ]; - - const patch1 = new Patch({status: 'modified', hunks: hunks1, buffer: buffer1, layers: layers1}); - - assert.notStrictEqual(patch1.getBuffer(), patch0.getBuffer()); - assert.notStrictEqual(patch1.getHunkLayer(), hunkLayer0); - assert.notStrictEqual(patch1.getUnchangedLayer(), unchangedLayer0); - assert.notStrictEqual(patch1.getAdditionLayer(), additionLayer0); - 
assert.notStrictEqual(patch1.getDeletionLayer(), deletionLayer0); - assert.notStrictEqual(patch1.getNoNewlineLayer(), noNewlineLayer0); - - patch1.adoptBufferFrom(patch0); - - assert.strictEqual(patch1.getBuffer(), buffer0); - - const markerRanges = [ - ['hunk', patch1.getHunkLayer(), hunkLayer0], - ['unchanged', patch1.getUnchangedLayer(), unchangedLayer0], - ['addition', patch1.getAdditionLayer(), additionLayer0], - ['deletion', patch1.getDeletionLayer(), deletionLayer0], - ['noNewline', patch1.getNoNewlineLayer(), noNewlineLayer0], - ].reduce((obj, [key, layer1, layer0]) => { - assert.strictEqual(layer1, layer0, `Layer ${key} not inherited`); - obj[key] = layer1.getMarkers().map(marker => marker.getRange().serialize()); - return obj; - }, {}); - - assert.deepEqual(markerRanges, { - hunk: [ - [[0, 0], [2, 4]], - [[3, 0], [5, 26]], - ], - unchanged: [ - [[0, 0], [0, 4]], - [[2, 0], [2, 4]], - [[3, 0], [3, 4]], - ], - addition: [ - [[1, 0], [1, 4]], - ], - deletion: [ - [[4, 0], [4, 4]], - ], - noNewline: [ - [[5, 0], [5, 26]], - ], - }); + assert.deepEqual(nullPatch.getFirstChangeRange().serialize(), [[0, 0], [0, 0]]); + assert.strictEqual(nullPatch.toStringIn(), ''); + assert.isFalse(nullPatch.isPresent()); }); }); @@ -951,6 +726,7 @@ function buildBuffer(lines, noNewline = false) { function buildLayers(buffer) { return { + patch: buffer.addMarkerLayer(), hunk: buffer.addMarkerLayer(), unchanged: buffer.addMarkerLayer(), addition: buffer.addMarkerLayer(), @@ -965,6 +741,8 @@ function markRange(buffer, start, end = start) { function buildPatchFixture() { const buffer = buildBuffer(26, true); + buffer.append('\n\n\n\n\n\n'); + const layers = buildLayers(buffer); const hunks = [ @@ -1014,6 +792,12 @@ function buildPatchFixture() { ], }), ]; + const marker = markRange(layers.patch, 0, 26); - return new Patch({status: 'modified', hunks, buffer, layers}); + return { + patch: new Patch({status: 'modified', hunks, marker}), + buffer, + layers, + marker, + }; } diff --git 
a/test/models/repository.test.js b/test/models/repository.test.js index 89748b06279..f56f08f1eae 100644 --- a/test/models/repository.test.js +++ b/test/models/repository.test.js @@ -110,7 +110,7 @@ describe('Repository', function() { assert.strictEqual(await repository.getHeadDescription(), '(no repository)'); assert.strictEqual(await repository.getOperationStates(), nullOperationStates); assert.strictEqual(await repository.getCommitMessage(), ''); - assert.isFalse((await repository.getFilePatchForPath('anything.txt')).isPresent()); + assert.isFalse((await repository.getFilePatchForPath('anything.txt')).anyPresent()); }); it('returns a rejecting promise', async function() { @@ -343,7 +343,9 @@ describe('Repository', function() { await repo.stageFileSymlinkChange(deletedSymlinkAddedFilePath); assert.isNull(await indexModeAndOid(deletedSymlinkAddedFilePath)); const unstagedFilePatch = await repo.getFilePatchForPath(deletedSymlinkAddedFilePath, {staged: false}); - assert.equal(unstagedFilePatch.getStatus(), 'added'); + assert.lengthOf(unstagedFilePatch.getFilePatches(), 1); + const [uFilePatch] = unstagedFilePatch.getFilePatches(); + assert.equal(uFilePatch.getStatus(), 'added'); assert.equal(unstagedFilePatch.toString(), dedent` diff --git a/symlink.txt b/symlink.txt new file mode 100644 @@ -362,7 +364,9 @@ describe('Repository', function() { await repo.stageFileSymlinkChange(deletedFileAddedSymlinkPath); assert.isNull(await indexModeAndOid(deletedFileAddedSymlinkPath)); const stagedFilePatch = await repo.getFilePatchForPath(deletedFileAddedSymlinkPath, {staged: true}); - assert.equal(stagedFilePatch.getStatus(), 'deleted'); + assert.lengthOf(stagedFilePatch.getFilePatches(), 1); + const [sFilePatch] = stagedFilePatch.getFilePatches(); + assert.equal(sFilePatch.getStatus(), 'deleted'); assert.equal(stagedFilePatch.toString(), dedent` diff --git a/a.txt b/a.txt deleted file mode 100644 @@ -403,7 +407,7 @@ describe('Repository', function() { }); 
describe('getFilePatchForPath', function() { - it('returns cached FilePatch objects if they exist', async function() { + it('returns cached MultiFilePatch objects if they exist', async function() { const workingDirPath = await cloneRepository('multiple-commits'); const repo = new Repository(workingDirPath); await repo.getLoadPromise(); @@ -418,7 +422,7 @@ describe('Repository', function() { assert.equal(await repo.getFilePatchForPath('file.txt', {staged: true}), stagedFilePatch); }); - it('returns new FilePatch object after repository refresh', async function() { + it('returns new MultiFilePatch object after repository refresh', async function() { const workingDirPath = await cloneRepository('three-files'); const repo = new Repository(workingDirPath); await repo.getLoadPromise(); @@ -433,13 +437,32 @@ describe('Repository', function() { assert.isTrue((await repo.getFilePatchForPath('a.txt')).isEqual(filePatchA)); }); - it('returns a nullFilePatch for unknown paths', async function() { + it('returns an empty MultiFilePatch for unknown paths', async function() { const workingDirPath = await cloneRepository('multiple-commits'); const repo = new Repository(workingDirPath); await repo.getLoadPromise(); const patch = await repo.getFilePatchForPath('no.txt'); - assert.isFalse(patch.isPresent()); + assert.isFalse(patch.anyPresent()); + }); + }); + + describe('getStagedChangesPatch', function() { + it('computes a multi-file patch of the staged changes', async function() { + const workdir = await cloneRepository('each-staging-group'); + const repo = new Repository(workdir); + await repo.getLoadPromise(); + + await fs.writeFile(path.join(workdir, 'unstaged-1.txt'), 'Unstaged file'); + + await fs.writeFile(path.join(workdir, 'staged-1.txt'), 'Staged file'); + await fs.writeFile(path.join(workdir, 'staged-2.txt'), 'Staged file'); + await repo.stageFiles(['staged-1.txt', 'staged-2.txt']); + + const mp = await repo.getStagedChangesPatch(); + + assert.lengthOf(mp.getFilePatches(), 
2); + assert.deepEqual(mp.getFilePatches().map(fp => fp.getPath()), ['staged-1.txt', 'staged-2.txt']); }); }); @@ -1089,6 +1112,44 @@ describe('Repository', function() { }); }); + describe('saveDiscardHistory()', function() { + let repository; + + beforeEach(async function() { + const workdir = await cloneRepository('three-files'); + repository = new Repository(workdir); + await repository.getLoadPromise(); + }); + + it('does nothing on a destroyed repository', async function() { + repository.destroy(); + + await repository.saveDiscardHistory(); + + assert.isNull(await repository.getConfig('atomGithub.historySha')); + }); + + it('does nothing if the repository is destroyed after the blob is created', async function() { + let resolveCreateHistoryBlob = () => {}; + sinon.stub(repository, 'createDiscardHistoryBlob').callsFake(() => new Promise(resolve => { + resolveCreateHistoryBlob = resolve; + })); + + const promise = repository.saveDiscardHistory(); + repository.destroy(); + resolveCreateHistoryBlob('nope'); + await promise; + + assert.isNull(await repository.getConfig('atomGithub.historySha')); + }); + + it('creates a blob and saves it in the git config', async function() { + assert.isNull(await repository.getConfig('atomGithub.historySha')); + await repository.saveDiscardHistory(); + assert.match(await repository.getConfig('atomGithub.historySha'), /^[a-z0-9]{40}$/); + }); + }); + describe('merge conflicts', function() { describe('getMergeConflicts()', function() { it('returns a promise resolving to an array of MergeConflict objects', async function() { @@ -1476,6 +1537,10 @@ describe('Repository', function() { 'getRemotes', () => repository.getRemotes(), ); + calls.set( + 'getStagedChangesPatch', + () => repository.getStagedChangesPatch(), + ); const withFile = fileName => { calls.set( diff --git a/test/views/commit-view.test.js b/test/views/commit-view.test.js index 7666fa22dc4..7a3513f4f4e 100644 --- a/test/views/commit-view.test.js +++ 
b/test/views/commit-view.test.js @@ -43,6 +43,7 @@ describe('CommitView', function() { stagedChangesExist={false} mergeConflictsExist={false} isCommitting={false} + commitPreviewActive={false} deactivateCommitBox={false} maximumCharacterLimit={72} messageBuffer={messageBuffer} @@ -51,6 +52,8 @@ describe('CommitView', function() { abortMerge={noop} toggleExpandedCommitMessageEditor={noop} updateSelectedCoAuthors={noop} + toggleCommitPreview={noop} + activateCommitPreview={noop} /> ); }); @@ -359,6 +362,160 @@ describe('CommitView', function() { assert.isFalse(wrapper.instance().hasFocusEditor()); }); + describe('advancing focus', function() { + let wrapper, instance, event; + + beforeEach(function() { + wrapper = mount(app); + instance = wrapper.instance(); + event = {stopPropagation: sinon.spy()}; + + sinon.spy(instance, 'setFocus'); + }); + + it('does nothing and returns false if the focus is not in the commit view', function() { + sinon.stub(instance, 'getFocus').returns(null); + assert.isFalse(instance.advanceFocus(event)); + assert.isFalse(instance.setFocus.called); + assert.isFalse(event.stopPropagation.called); + }); + + it('moves focus to the commit editor if the commit preview button is focused', function() { + sinon.stub(instance, 'getFocus').returns(CommitView.focus.COMMIT_PREVIEW_BUTTON); + + assert.isTrue(instance.advanceFocus(event)); + assert.isTrue(instance.setFocus.calledWith(CommitView.focus.EDITOR)); + assert.isTrue(event.stopPropagation.called); + }); + + it('inserts a tab if the commit editor is focused', function() { + sinon.stub(instance, 'getFocus').returns(CommitView.focus.EDITOR); + + assert.isTrue(instance.advanceFocus(event)); + assert.isFalse(event.stopPropagation.called); + }); + + it('moves focus to the commit button if the coauthor form is focused and no merge is in progress', function() { + sinon.stub(instance, 'getFocus').returns(CommitView.focus.COAUTHOR_INPUT); + + assert.isTrue(instance.advanceFocus(event)); + 
assert.isTrue(instance.setFocus.calledWith(CommitView.focus.COMMIT_BUTTON)); + assert.isTrue(event.stopPropagation.called); + }); + + it('moves focus to the abort merge button if the coauthor form is focused and a merge is in progress', function() { + wrapper.setProps({isMerging: true}); + sinon.stub(instance, 'getFocus').returns(CommitView.focus.COAUTHOR_INPUT); + + assert.isTrue(instance.advanceFocus(event)); + assert.isTrue(instance.setFocus.calledWith(CommitView.focus.ABORT_MERGE_BUTTON)); + assert.isTrue(event.stopPropagation.called); + }); + + it('moves focus to the commit button if the abort merge button is focused', function() { + sinon.stub(instance, 'getFocus').returns(CommitView.focus.ABORT_MERGE_BUTTON); + + assert.isTrue(instance.advanceFocus(event)); + assert.isTrue(instance.setFocus.calledWith(CommitView.focus.COMMIT_BUTTON)); + assert.isTrue(event.stopPropagation.called); + }); + + it('does nothing and returns true if the commit button is focused', function() { + sinon.stub(instance, 'getFocus').returns(CommitView.focus.COMMIT_BUTTON); + + assert.isTrue(instance.advanceFocus(event)); + assert.isFalse(instance.setFocus.called); + assert.isTrue(event.stopPropagation.called); + }); + }); + + describe('retreating focus', function() { + let wrapper, instance, event; + + beforeEach(function() { + wrapper = mount(app); + instance = wrapper.instance(); + event = {stopPropagation: sinon.spy()}; + + sinon.spy(instance, 'setFocus'); + }); + + it('does nothing and returns false if the focus is not in the commit view', function() { + sinon.stub(instance, 'getFocus').returns(null); + + assert.isFalse(instance.retreatFocus(event)); + assert.isFalse(instance.setFocus.called); + assert.isFalse(event.stopPropagation.called); + }); + + it('moves focus to the abort merge button if the commit button is focused and a merge is in progress', function() { + wrapper.setProps({isMerging: true}); + sinon.stub(instance, 'getFocus').returns(CommitView.focus.COMMIT_BUTTON); + + 
assert.isTrue(instance.retreatFocus(event)); + assert.isTrue(instance.setFocus.calledWith(CommitView.focus.ABORT_MERGE_BUTTON)); + assert.isTrue(event.stopPropagation.called); + }); + + it('moves focus to the editor if the commit button is focused and no merge is underway', function() { + sinon.stub(instance, 'getFocus').returns(CommitView.focus.COMMIT_BUTTON); + + assert.isTrue(instance.retreatFocus(event)); + assert.isTrue(instance.setFocus.calledWith(CommitView.focus.EDITOR)); + assert.isTrue(event.stopPropagation.called); + }); + + it('moves focus to the co-author form if it is visible, the commit button is focused, and no merge', function() { + wrapper.setState({showCoAuthorInput: true}); + sinon.stub(instance, 'getFocus').returns(CommitView.focus.COMMIT_BUTTON); + + assert.isTrue(instance.retreatFocus(event)); + assert.isTrue(instance.setFocus.calledWith(CommitView.focus.COAUTHOR_INPUT)); + assert.isTrue(event.stopPropagation.called); + }); + + it('moves focus to the co-author form if it is visible and the abort merge button is in focus', function() { + wrapper.setState({showCoAuthorInput: true}); + sinon.stub(instance, 'getFocus').returns(CommitView.focus.ABORT_MERGE_BUTTON); + + assert.isTrue(instance.retreatFocus(event)); + assert.isTrue(instance.setFocus.calledWith(CommitView.focus.COAUTHOR_INPUT)); + assert.isTrue(event.stopPropagation.called); + }); + + it('moves focus to the commit editor if the abort merge button is in focus', function() { + sinon.stub(instance, 'getFocus').returns(CommitView.focus.ABORT_MERGE_BUTTON); + + assert.isTrue(instance.retreatFocus(event)); + assert.isTrue(instance.setFocus.calledWith(CommitView.focus.EDITOR)); + assert.isTrue(event.stopPropagation.called); + }); + + it('moves focus to the commit editor if the co-author form is focused', function() { + sinon.stub(instance, 'getFocus').returns(CommitView.focus.COAUTHOR_INPUT); + + assert.isTrue(instance.retreatFocus(event)); + 
assert.isTrue(instance.setFocus.calledWith(CommitView.focus.EDITOR)); + assert.isTrue(event.stopPropagation.called); + }); + + it('moves focus to the commit preview button if the commit editor is focused', function() { + sinon.stub(instance, 'getFocus').returns(CommitView.focus.EDITOR); + + assert.isTrue(instance.retreatFocus(event)); + assert.isTrue(instance.setFocus.calledWith(CommitView.focus.COMMIT_PREVIEW_BUTTON)); + assert.isTrue(event.stopPropagation.called); + }); + + it('does nothing and returns false if the commit preview button is focused', function() { + sinon.stub(instance, 'getFocus').returns(CommitView.focus.COMMIT_PREVIEW_BUTTON); + + assert.isFalse(instance.retreatFocus(event)); + assert.isFalse(instance.setFocus.called); + assert.isFalse(event.stopPropagation.called); + }); + }); + it('remembers the current focus', function() { const wrapper = mount(React.cloneElement(app, {isMerging: true})); wrapper.instance().toggleCoAuthorInput(); @@ -369,6 +526,7 @@ describe('CommitView', function() { ['.github-CommitView-abortMerge', CommitView.focus.ABORT_MERGE_BUTTON], ['.github-CommitView-commit', CommitView.focus.COMMIT_BUTTON], ['.github-CommitView-coAuthorEditor input', CommitView.focus.COAUTHOR_INPUT], + ['.github-CommitView-commitPreview', CommitView.focus.COMMIT_PREVIEW_BUTTON], ]; for (const [selector, focus, subselector] of foci) { let target = wrapper.find(selector).getDOMNode(); @@ -382,6 +540,7 @@ describe('CommitView', function() { const holders = [ 'refEditorComponent', 'refEditorModel', 'refAbortMergeButton', 'refCommitButton', 'refCoAuthorSelect', + 'refCommitPreviewButton', ].map(ivar => wrapper.instance()[ivar]); for (const holder of holders) { holder.setter(null); @@ -390,6 +549,15 @@ describe('CommitView', function() { }); describe('restoring focus', function() { + it('to the commit preview button', function() { + const wrapper = mount(app); + const element = wrapper.find('.github-CommitView-commitPreview').getDOMNode(); + 
sinon.spy(element, 'focus'); + + assert.isTrue(wrapper.instance().setFocus(CommitView.focus.COMMIT_PREVIEW_BUTTON)); + assert.isTrue(element.focus.called); + }); + it('to the editor', function() { const wrapper = mount(app); const element = wrapper.find('AtomTextEditor').getDOMNode().querySelector('atom-text-editor'); @@ -448,6 +616,7 @@ describe('CommitView', function() { // Simulate an unmounted component by clearing out RefHolders manually. const holders = [ 'refEditorComponent', 'refEditorModel', 'refAbortMergeButton', 'refCommitButton', 'refCoAuthorSelect', + 'refCommitPreviewButton', ].map(ivar => wrapper.instance()[ivar]); for (const holder of holders) { holder.setter(null); @@ -458,4 +627,44 @@ describe('CommitView', function() { } }); }); + + describe('commit preview button', function() { + it('is enabled when there is staged changes', function() { + const wrapper = shallow(React.cloneElement(app, { + stagedChangesExist: true, + })); + assert.isFalse(wrapper.find('.github-CommitView-commitPreview').prop('disabled')); + }); + + it('is disabled when there\'s no staged changes', function() { + const wrapper = shallow(React.cloneElement(app, { + stagedChangesExist: false, + })); + assert.isTrue(wrapper.find('.github-CommitView-commitPreview').prop('disabled')); + }); + + it('calls a callback when the button is clicked', function() { + const toggleCommitPreview = sinon.spy(); + + const wrapper = shallow(React.cloneElement(app, { + toggleCommitPreview, + stagedChangesExist: true, + })); + + wrapper.find('.github-CommitView-commitPreview').simulate('click'); + assert.isTrue(toggleCommitPreview.called); + }); + + it('displays correct button text depending on prop value', function() { + const wrapper = shallow(app); + + assert.strictEqual(wrapper.find('.github-CommitView-commitPreview').text(), 'See All Staged Changes'); + + wrapper.setProps({commitPreviewActive: true}); + assert.strictEqual(wrapper.find('.github-CommitView-commitPreview').text(), 'Hide All Staged 
Changes'); + + wrapper.setProps({commitPreviewActive: false}); + assert.strictEqual(wrapper.find('.github-CommitView-commitPreview').text(), 'See All Staged Changes'); + }); + }); }); diff --git a/test/views/file-patch-header-view.test.js b/test/views/file-patch-header-view.test.js index d56eb3605bb..2d4e0acb651 100644 --- a/test/views/file-patch-header-view.test.js +++ b/test/views/file-patch-header-view.test.js @@ -1,9 +1,13 @@ import React from 'react'; import {shallow} from 'enzyme'; +import path from 'path'; import FilePatchHeaderView from '../../lib/views/file-patch-header-view'; +import ChangedFileItem from '../../lib/items/changed-file-item'; +import CommitPreviewItem from '../../lib/items/commit-preview-item'; describe('FilePatchHeaderView', function() { + const relPath = path.join('dir', 'a.txt'); let atomEnv; beforeEach(function() { @@ -17,11 +21,13 @@ describe('FilePatchHeaderView', function() { function buildApp(overrideProps = {}) { return ( n.prop('title') === tooltip)); }; } @@ -116,12 +132,24 @@ describe('FilePatchHeaderView', function() { it('includes a toggle to unstaged button when staged', createStagedPatchToggleTest(props)); }); - it('includes an open file button', function() { - const openFile = sinon.stub(); - const wrapper = shallow(buildApp({openFile})); + describe('the jump-to-file button', function() { + it('calls the jump to file file action prop', function() { + const openFile = sinon.stub(); + const wrapper = shallow(buildApp({openFile})); - wrapper.find('button.icon-code').simulate('click'); - assert.isTrue(openFile.called); + wrapper.find('button.icon-code').simulate('click'); + assert.isTrue(openFile.called); + }); + + it('is singular when selections exist within a single file patch', function() { + const wrapper = shallow(buildApp({hasMultipleFileSelections: false})); + assert.strictEqual(wrapper.find('button.icon-code').text(), 'Jump To File'); + }); + + it('is plural when selections exist within multiple file patches', 
function() { + const wrapper = shallow(buildApp({hasMultipleFileSelections: true})); + assert.strictEqual(wrapper.find('button.icon-code').text(), 'Jump To Files'); + }); }); function createToggleFileTest({stagingStatus, buttonClass, oppositeButtonClass}) { diff --git a/test/views/git-tab-view.test.js b/test/views/git-tab-view.test.js index ba684fc246c..c21f152e91c 100644 --- a/test/views/git-tab-view.test.js +++ b/test/views/git-tab-view.test.js @@ -83,17 +83,8 @@ describe('GitTabView', function() { event = {stopPropagation: sinon.spy()}; }); - it('does nothing if the commit controller has focus', async function() { - sinon.stub(commitController, 'hasFocus').returns(true); - sinon.spy(stagingView, 'activateNextList'); - - await wrapper.instance().advanceFocus(event); - - assert.isFalse(event.stopPropagation.called); - assert.isFalse(stagingView.activateNextList.called); - }); - it('activates the next staging view list and stops', async function() { + sinon.stub(commitController, 'advanceFocus').returns(false); sinon.stub(stagingView, 'activateNextList').resolves(true); sinon.spy(commitController, 'setFocus'); @@ -104,16 +95,27 @@ describe('GitTabView', function() { assert.isFalse(commitController.setFocus.called); }); - it('moves focus to the commit message editor from the end of the staging view', async function() { + it('moves focus to the commit preview button from the end of the staging view', async function() { + sinon.stub(commitController, 'advanceFocus').returns(false); sinon.stub(stagingView, 'activateNextList').resolves(false); sinon.stub(commitController, 'setFocus').returns(true); await wrapper.instance().advanceFocus(event); - assert.isTrue(commitController.setFocus.calledWith(GitTabView.focus.EDITOR)); + assert.isTrue(commitController.setFocus.calledWith(GitTabView.focus.COMMIT_PREVIEW_BUTTON)); assert.isTrue(event.stopPropagation.called); }); + it('advances focus within the commit view', async function() { + sinon.stub(commitController, 
'advanceFocus').returns(true); + sinon.spy(stagingView, 'activateNextList'); + + await wrapper.instance().advanceFocus(event); + + assert.isTrue(commitController.advanceFocus.called); + assert.isFalse(stagingView.activateNextList.called); + }); + it('does nothing if refs are unavailable', async function() { wrapper.instance().refCommitController.setter(null); @@ -135,20 +137,8 @@ describe('GitTabView', function() { event = {stopPropagation: sinon.spy()}; }); - it('focuses the last staging list if the commit editor has focus', async function() { - sinon.stub(commitController, 'hasFocus').returns(true); - sinon.stub(commitController, 'hasFocusEditor').returns(true); - sinon.stub(stagingView, 'activateLastList').resolves(true); - - await wrapper.instance().retreatFocus(event); - - assert.isTrue(stagingView.activateLastList.called); - assert.isTrue(event.stopPropagation.called); - }); - - it('does nothing if the commit controller has focus but not in its editor', async function() { - sinon.stub(commitController, 'hasFocus').returns(true); - sinon.stub(commitController, 'hasFocusEditor').returns(false); + it('does nothing if the commit controller has focus but not in the preview button', async function() { + sinon.stub(commitController, 'retreatFocus').returns(true); sinon.spy(stagingView, 'activateLastList'); sinon.spy(stagingView, 'activatePreviousList'); @@ -159,8 +149,19 @@ describe('GitTabView', function() { assert.isFalse(event.stopPropagation.called); }); + it('focuses the last staging list if the commit preview button has focus', async function() { + sinon.stub(commitController, 'retreatFocus').returns(false); + sinon.stub(commitController, 'hasFocusAtBeginning').returns(true); + sinon.stub(stagingView, 'activateLastList').resolves(true); + + await wrapper.instance().retreatFocus(event); + + assert.isTrue(stagingView.activateLastList.called); + assert.isTrue(event.stopPropagation.called); + }); + it('activates the previous staging list and stops', async 
function() { - sinon.stub(commitController, 'hasFocus').returns(false); + sinon.stub(commitController, 'retreatFocus').returns(false); sinon.stub(stagingView, 'activatePreviousList').resolves(true); await wrapper.instance().retreatFocus(event); @@ -224,4 +225,13 @@ describe('GitTabView', function() { wrapper.prop('refRoot').setter(null); assert.isFalse(wrapper.instance().hasFocus()); }); + + it('imperatively focuses the commit preview button', async function() { + const wrapper = mount(await buildApp()); + + const setFocus = sinon.spy(wrapper.find('CommitController').instance(), 'setFocus'); + wrapper.instance().focusAndSelectCommitPreviewButton(); + assert.isTrue(setFocus.called); + assert.isTrue(setFocus.lastCall.returnValue); + }); }); diff --git a/test/views/file-patch-view.test.js b/test/views/multi-file-patch-view.test.js similarity index 60% rename from test/views/file-patch-view.test.js rename to test/views/multi-file-patch-view.test.js index f0c70afd89d..7d86378a065 100644 --- a/test/views/file-patch-view.test.js +++ b/test/views/multi-file-patch-view.test.js @@ -2,13 +2,15 @@ import React from 'react'; import {shallow, mount} from 'enzyme'; import {cloneRepository, buildRepository} from '../helpers'; -import FilePatchView from '../../lib/views/file-patch-view'; -import {buildFilePatch} from '../../lib/models/patch'; +import MultiFilePatchView from '../../lib/views/multi-file-patch-view'; +import {multiFilePatchBuilder} from '../builder/patch'; import {nullFile} from '../../lib/models/patch/file'; import FilePatch from '../../lib/models/patch/file-patch'; +import RefHolder from '../../lib/models/ref-holder'; +import CommitPreviewItem from '../../lib/items/commit-preview-item'; -describe('FilePatchView', function() { - let atomEnv, workspace, repository, filePatch; +describe('MultiFilePatchView', function() { + let atomEnv, workspace, repository, filePatches; beforeEach(async function() { atomEnv = global.buildAtomEnvironment(); @@ -17,26 +19,20 @@ 
describe('FilePatchView', function() { const workdirPath = await cloneRepository(); repository = await buildRepository(workdirPath); - // path.txt: unstaged changes - filePatch = buildFilePatch([{ - oldPath: 'path.txt', - oldMode: '100644', - newPath: 'path.txt', - newMode: '100644', - status: 'modified', - hunks: [ - { - oldStartLine: 4, oldLineCount: 3, newStartLine: 4, newLineCount: 4, - heading: 'zero', - lines: [' 0000', '+0001', '+0002', '-0003', ' 0004'], - }, - { - oldStartLine: 8, oldLineCount: 3, newStartLine: 9, newLineCount: 3, - heading: 'one', - lines: [' 0005', '+0006', '-0007', ' 0008'], - }, - ], - }]); + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('path.txt')); + fp.addHunk(h => { + h.oldRow(4); + h.unchanged('0000').added('0001', '0002').deleted('0003').unchanged('0004'); + }); + fp.addHunk(h => { + h.oldRow(8); + h.unchanged('0005').added('0006').deleted('0007').unchanged('0008'); + }); + }).build(); + + filePatches = multiFilePatch; }); afterEach(function() { @@ -48,11 +44,13 @@ describe('FilePatchView', function() { relPath: 'path.txt', stagingStatus: 'unstaged', isPartiallyStaged: false, - filePatch, + multiFilePatch: filePatches, hasUndoHistory: false, selectionMode: 'line', selectedRows: new Set(), + hasMultipleFileSelections: false, repository, + isActive: true, workspace, config: atomEnv.config, @@ -63,7 +61,7 @@ describe('FilePatchView', function() { selectedRowsChanged: () => {}, diveIntoMirrorPatch: () => {}, - surfaceFile: () => {}, + surface: () => {}, openFile: () => {}, toggleFile: () => {}, toggleRows: () => {}, @@ -72,10 +70,12 @@ describe('FilePatchView', function() { undoLastDiscard: () => {}, discardRows: () => {}, + itemType: CommitPreviewItem, + ...overrideProps, }; - return ; + return ; } it('renders the file header', function() { @@ -87,16 +87,57 @@ describe('FilePatchView', function() { const undoLastDiscard = sinon.spy(); const wrapper = 
shallow(buildApp({undoLastDiscard})); - wrapper.find('FilePatchHeaderView').prop('undoLastDiscard')(); + wrapper.find('FilePatchHeaderView').first().prop('undoLastDiscard')(); + + assert.lengthOf(filePatches.getFilePatches(), 1); + const [filePatch] = filePatches.getFilePatches(); + assert.isTrue(undoLastDiscard.calledWith(filePatch, {eventSource: 'button'})); + }); + + it('dives into the mirror patch from the file header button', function() { + const diveIntoMirrorPatch = sinon.spy(); + const wrapper = shallow(buildApp({diveIntoMirrorPatch})); + + wrapper.find('FilePatchHeaderView').prop('diveIntoMirrorPatch')(); + + assert.lengthOf(filePatches.getFilePatches(), 1); + const [filePatch] = filePatches.getFilePatches(); + assert.isTrue(diveIntoMirrorPatch.calledWith(filePatch)); + }); + + it('toggles a file from staged to unstaged from the file header button', function() { + const toggleFile = sinon.spy(); + const wrapper = shallow(buildApp({toggleFile})); + + wrapper.find('FilePatchHeaderView').prop('toggleFile')(); - assert.isTrue(undoLastDiscard.calledWith({eventSource: 'button'})); + assert.lengthOf(filePatches.getFilePatches(), 1); + const [filePatch] = filePatches.getFilePatches(); + assert.isTrue(toggleFile.calledWith(filePatch)); + }); + + it('passes hasMultipleFileSelections to all file headers', function() { + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => fp.setOldFile(f => f.path('0'))) + .addFilePatch(fp => fp.setOldFile(f => f.path('1'))) + .build(); + + const wrapper = shallow(buildApp({multiFilePatch, hasMultipleFileSelections: true})); + + assert.isTrue(wrapper.find('FilePatchHeaderView[relPath="0"]').prop('hasMultipleFileSelections')); + assert.isTrue(wrapper.find('FilePatchHeaderView[relPath="1"]').prop('hasMultipleFileSelections')); + + wrapper.setProps({hasMultipleFileSelections: false}); + + assert.isFalse(wrapper.find('FilePatchHeaderView[relPath="0"]').prop('hasMultipleFileSelections')); + 
assert.isFalse(wrapper.find('FilePatchHeaderView[relPath="1"]').prop('hasMultipleFileSelections')); }); it('renders the file patch within an editor', function() { const wrapper = mount(buildApp()); const editor = wrapper.find('AtomTextEditor'); - assert.strictEqual(editor.instance().getModel().getText(), filePatch.getBuffer().getText()); + assert.strictEqual(editor.instance().getModel().getText(), filePatches.getBuffer().getText()); }); it('sets the root class when in hunk selection mode', function() { @@ -106,6 +147,36 @@ describe('FilePatchView', function() { assert.isTrue(wrapper.find('.github-FilePatchView--hunkMode').exists()); }); + describe('initial selection', function() { + it('selects the origin with an empty FilePatch', function() { + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => fp.empty()) + .build(); + const wrapper = mount(buildApp({multiFilePatch})); + const editor = wrapper.find('AtomTextEditor').instance().getModel(); + + assert.deepEqual(editor.getSelectedBufferRanges().map(r => r.serialize()), [[[0, 0], [0, 0]]]); + }); + + it('selects the first hunk with a populated file patch', function() { + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-0')); + fp.addHunk(h => h.unchanged('0').added('1', '2').deleted('3').unchanged('4')); + fp.addHunk(h => h.added('5', '6')); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('file-1')); + fp.addHunk(h => h.deleted('7', '8', '9')); + }) + .build(); + const wrapper = mount(buildApp({multiFilePatch})); + const editor = wrapper.find('AtomTextEditor').instance().getModel(); + + assert.deepEqual(editor.getSelectedBufferRanges().map(r => r.serialize()), [[[0, 0], [4, 1]]]); + }); + }); + it('preserves the selection index when a new file patch arrives in line selection mode', function() { const selectedRowsChanged = sinon.spy(); const wrapper = mount(buildApp({ @@ -114,22 +185,16 @@ describe('FilePatchView', function() { 
selectedRowsChanged, })); - const nextPatch = buildFilePatch([{ - oldPath: 'path.txt', - oldMode: '100644', - newPath: 'path.txt', - newMode: '100644', - status: 'modified', - hunks: [ - { - oldStartLine: 5, oldLineCount: 4, newStartLine: 5, newLineCount: 3, - heading: 'heading', - lines: [' 0000', '+0001', ' 0002', '-0003', ' 0004'], - }, - ], - }]); - - wrapper.setProps({filePatch: nextPatch}); + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('path.txt')); + fp.addHunk(h => { + h.oldRow(5); + h.unchanged('0000').added('0001').unchanged('0002').deleted('0003').unchanged('0004'); + }); + }).build(); + + wrapper.setProps({multiFilePatch}); assert.sameMembers(Array.from(selectedRowsChanged.lastCall.args[0]), [3]); assert.strictEqual(selectedRowsChanged.lastCall.args[1], 'line'); @@ -144,71 +209,53 @@ describe('FilePatchView', function() { }); it('selects the next full hunk when a new file patch arrives in hunk selection mode', function() { - const multiHunkPatch = buildFilePatch([{ - oldPath: 'path.txt', - oldMode: '100644', - newPath: 'path.txt', - newMode: '100644', - status: 'modified', - hunks: [ - { - oldStartLine: 10, oldLineCount: 4, newStartLine: 10, newLineCount: 4, - heading: '0', - lines: [' 0000', '+0001', ' 0002', '-0003', ' 0004'], - }, - { - oldStartLine: 20, oldLineCount: 3, newStartLine: 20, newLineCount: 4, - heading: '1', - lines: [' 0005', '+0006', '+0007', '-0008', ' 0009'], - }, - { - oldStartLine: 30, oldLineCount: 3, newStartLine: 31, newLineCount: 3, - heading: '2', - lines: [' 0010', '+0011', '-0012', ' 0013'], - }, - { - oldStartLine: 40, oldLineCount: 4, newStartLine: 41, newLineCount: 4, - heading: '3', - lines: [' 0014', '-0015', ' 0016', '+0017', ' 0018'], - }, - ], - }]); + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('path.txt')); + fp.addHunk(h => { + h.oldRow(10); + 
h.unchanged('0000').added('0001').unchanged('0002').deleted('0003').unchanged('0004'); + }); + fp.addHunk(h => { + h.oldRow(20); + h.unchanged('0005').added('0006').added('0007').deleted('0008').unchanged('0009'); + }); + fp.addHunk(h => { + h.oldRow(30); + h.unchanged('0010').added('0011').deleted('0012').unchanged('0013'); + }); + fp.addHunk(h => { + h.oldRow(40); + h.unchanged('0014').deleted('0015').unchanged('0016').added('0017').unchanged('0018'); + }); + }).build(); const selectedRowsChanged = sinon.spy(); const wrapper = mount(buildApp({ - filePatch: multiHunkPatch, + multiFilePatch, selectedRows: new Set([6, 7, 8]), selectionMode: 'hunk', selectedRowsChanged, })); - const nextPatch = buildFilePatch([{ - oldPath: 'path.txt', - oldMode: '100644', - newPath: 'path.txt', - newMode: '100644', - status: 'modified', - hunks: [ - { - oldStartLine: 10, oldLineCount: 4, newStartLine: 10, newLineCount: 4, - heading: '0', - lines: [' 0000', '+0001', ' 0002', '-0003', ' 0004'], - }, - { - oldStartLine: 30, oldLineCount: 3, newStartLine: 30, newLineCount: 3, - heading: '2', - // 5 6 7 8 - lines: [' 0010', '+0011', '-0012', ' 0013'], - }, - { - oldStartLine: 40, oldLineCount: 4, newStartLine: 40, newLineCount: 4, - heading: '3', - lines: [' 0014', '-0015', ' 0016', '+0017', ' 0018'], - }, - ], - }]); - - wrapper.setProps({filePatch: nextPatch}); + const {multiFilePatch: nextMfp} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('path.txt')); + fp.addHunk(h => { + h.oldRow(10); + h.unchanged('0000').added('0001').unchanged('0002').deleted('0003').unchanged('0004'); + }); + fp.addHunk(h => { + h.oldRow(30); + h.unchanged('0010').added('0011').deleted('0012').unchanged('0013'); + }); + fp.addHunk(h => { + h.oldRow(40); + h.unchanged('0014').deleted('0015').unchanged('0016').added('0017').unchanged('0018'); + }); + }).build(); + + wrapper.setProps({multiFilePatch: nextMfp}); assert.sameMembers(Array.from(selectedRowsChanged.lastCall.args[0]), [6, 
7]); assert.strictEqual(selectedRowsChanged.lastCall.args[1], 'hunk'); @@ -233,24 +280,67 @@ describe('FilePatchView', function() { assert.isTrue(window.removeEventListener.calledWith('mouseup', handler)); }); + describe('refInitialFocus', function() { + it('is set to its editor', function() { + const refInitialFocus = new RefHolder(); + const wrapper = mount(buildApp({refInitialFocus})); + + assert.isFalse(refInitialFocus.isEmpty()); + assert.strictEqual( + refInitialFocus.get(), + wrapper.find('AtomTextEditor').getDOMNode().querySelector('atom-text-editor'), + ); + }); + + it('may be swapped out for a new RefHolder', function() { + const refInitialFocus0 = new RefHolder(); + const wrapper = mount(buildApp({refInitialFocus: refInitialFocus0})); + const editorElement = wrapper.find('AtomTextEditor').getDOMNode().querySelector('atom-text-editor'); + + assert.strictEqual(refInitialFocus0.getOr(null), editorElement); + + const refInitialFocus1 = new RefHolder(); + wrapper.setProps({refInitialFocus: refInitialFocus1}); + + assert.isTrue(refInitialFocus0.isEmpty()); + assert.strictEqual(refInitialFocus1.getOr(null), editorElement); + + wrapper.setProps({refInitialFocus: null}); + + assert.isTrue(refInitialFocus0.isEmpty()); + assert.isTrue(refInitialFocus1.isEmpty()); + + wrapper.setProps({refInitialFocus: refInitialFocus0}); + + assert.strictEqual(refInitialFocus0.getOr(null), editorElement); + assert.isTrue(refInitialFocus1.isEmpty()); + }); + }); + describe('executable mode changes', function() { it('does not render if the mode has not changed', function() { - const fp = filePatch.clone({ - oldFile: filePatch.getOldFile().clone({mode: '100644'}), - newFile: filePatch.getNewFile().clone({mode: '100644'}), + const [fp] = filePatches.getFilePatches(); + const mfp = filePatches.clone({ + filePatches: [fp.clone({ + oldFile: fp.getOldFile().clone({mode: '100644'}), + newFile: fp.getNewFile().clone({mode: '100644'}), + })], }); - const wrapper = shallow(buildApp({filePatch: 
fp})); + const wrapper = shallow(buildApp({multiFilePatch: mfp})); assert.isFalse(wrapper.find('FilePatchMetaView[title="Mode change"]').exists()); }); it('renders change details within a meta container', function() { - const fp = filePatch.clone({ - oldFile: filePatch.getOldFile().clone({mode: '100644'}), - newFile: filePatch.getNewFile().clone({mode: '100755'}), + const [fp] = filePatches.getFilePatches(); + const mfp = filePatches.clone({ + filePatches: [fp.clone({ + oldFile: fp.getOldFile().clone({mode: '100644'}), + newFile: fp.getNewFile().clone({mode: '100755'}), + })], }); - const wrapper = mount(buildApp({filePatch: fp, stagingStatus: 'unstaged'})); + const wrapper = mount(buildApp({multiFilePatch: mfp, stagingStatus: 'unstaged'})); const meta = wrapper.find('FilePatchMetaView[title="Mode change"]'); assert.strictEqual(meta.prop('actionIcon'), 'icon-move-down'); @@ -261,13 +351,17 @@ describe('FilePatchView', function() { }); it("stages or unstages the mode change when the meta container's action is triggered", function() { - const fp = filePatch.clone({ - oldFile: filePatch.getOldFile().clone({mode: '100644'}), - newFile: filePatch.getNewFile().clone({mode: '100755'}), + const [fp] = filePatches.getFilePatches(); + + const mfp = filePatches.clone({ + filePatches: [fp.clone({ + oldFile: fp.getOldFile().clone({mode: '100644'}), + newFile: fp.getNewFile().clone({mode: '100755'}), + })], }); const toggleModeChange = sinon.stub(); - const wrapper = shallow(buildApp({filePatch: fp, stagingStatus: 'staged', toggleModeChange})); + const wrapper = mount(buildApp({multiFilePatch: mfp, stagingStatus: 'staged', toggleModeChange})); const meta = wrapper.find('FilePatchMetaView[title="Mode change"]'); assert.isTrue(meta.exists()); @@ -281,22 +375,28 @@ describe('FilePatchView', function() { describe('symlink changes', function() { it('does not render if the symlink status is unchanged', function() { - const fp = filePatch.clone({ - oldFile: 
filePatch.getOldFile().clone({mode: '100644'}), - newFile: filePatch.getNewFile().clone({mode: '100755'}), + const [fp] = filePatches.getFilePatches(); + const mfp = filePatches.clone({ + filePatches: [fp.clone({ + oldFile: fp.getOldFile().clone({mode: '100644'}), + newFile: fp.getNewFile().clone({mode: '100755'}), + })], }); - const wrapper = mount(buildApp({filePatch: fp})); + const wrapper = mount(buildApp({multiFilePatch: mfp})); assert.lengthOf(wrapper.find('FilePatchMetaView').filterWhere(v => v.prop('title').startsWith('Symlink')), 0); }); it('renders symlink change information within a meta container', function() { - const fp = filePatch.clone({ - oldFile: filePatch.getOldFile().clone({mode: '120000', symlink: '/old.txt'}), - newFile: filePatch.getNewFile().clone({mode: '120000', symlink: '/new.txt'}), + const [fp] = filePatches.getFilePatches(); + const mfp = filePatches.clone({ + filePatches: [fp.clone({ + oldFile: fp.getOldFile().clone({mode: '120000', symlink: '/old.txt'}), + newFile: fp.getNewFile().clone({mode: '120000', symlink: '/new.txt'}), + })], }); - const wrapper = mount(buildApp({filePatch: fp, stagingStatus: 'unstaged'})); + const wrapper = mount(buildApp({multiFilePatch: mfp, stagingStatus: 'unstaged'})); const meta = wrapper.find('FilePatchMetaView[title="Symlink changed"]'); assert.isTrue(meta.exists()); assert.strictEqual(meta.prop('actionIcon'), 'icon-move-down'); @@ -309,12 +409,15 @@ describe('FilePatchView', function() { it('stages or unstages the symlink change', function() { const toggleSymlinkChange = sinon.stub(); - const fp = filePatch.clone({ - oldFile: filePatch.getOldFile().clone({mode: '120000', symlink: '/old.txt'}), - newFile: filePatch.getNewFile().clone({mode: '120000', symlink: '/new.txt'}), + const [fp] = filePatches.getFilePatches(); + const mfp = filePatches.clone({ + filePatches: [fp.clone({ + oldFile: fp.getOldFile().clone({mode: '120000', symlink: '/old.txt'}), + newFile: fp.getNewFile().clone({mode: '120000', 
symlink: '/new.txt'}), + })], }); - const wrapper = mount(buildApp({filePatch: fp, stagingStatus: 'staged', toggleSymlinkChange})); + const wrapper = mount(buildApp({multiFilePatch: mfp, stagingStatus: 'staged', toggleSymlinkChange})); const meta = wrapper.find('FilePatchMetaView[title="Symlink changed"]'); assert.isTrue(meta.exists()); assert.strictEqual(meta.prop('actionIcon'), 'icon-move-up'); @@ -325,12 +428,15 @@ describe('FilePatchView', function() { }); it('renders details for a symlink deletion', function() { - const fp = filePatch.clone({ - oldFile: filePatch.getOldFile().clone({mode: '120000', symlink: '/old.txt'}), - newFile: nullFile, + const [fp] = filePatches.getFilePatches(); + const mfp = filePatches.clone({ + filePatches: [fp.clone({ + oldFile: fp.getOldFile().clone({mode: '120000', symlink: '/old.txt'}), + newFile: nullFile, + })], }); - const wrapper = mount(buildApp({filePatch: fp})); + const wrapper = mount(buildApp({multiFilePatch: mfp})); const meta = wrapper.find('FilePatchMetaView[title="Symlink deleted"]'); assert.isTrue(meta.exists()); assert.strictEqual( @@ -340,12 +446,15 @@ describe('FilePatchView', function() { }); it('renders details for a symlink creation', function() { - const fp = filePatch.clone({ - oldFile: nullFile, - newFile: filePatch.getOldFile().clone({mode: '120000', symlink: '/new.txt'}), + const [fp] = filePatches.getFilePatches(); + const mfp = filePatches.clone({ + filePatches: [fp.clone({ + oldFile: nullFile, + newFile: fp.getOldFile().clone({mode: '120000', symlink: '/new.txt'}), + })], }); - const wrapper = mount(buildApp({filePatch: fp})); + const wrapper = mount(buildApp({multiFilePatch: mfp})); const meta = wrapper.find('FilePatchMetaView[title="Symlink created"]'); assert.isTrue(meta.exists()); assert.strictEqual( @@ -357,28 +466,22 @@ describe('FilePatchView', function() { describe('hunk headers', function() { it('renders one for each hunk', function() { - const fp = buildFilePatch([{ - oldPath: 'path.txt', - 
oldMode: '100644', - newPath: 'path.txt', - newMode: '100644', - status: 'modified', - hunks: [ - { - oldStartLine: 1, oldLineCount: 2, newStartLine: 1, newLineCount: 3, - heading: 'first hunk', - lines: [' 0000', '+0001', ' 0002'], - }, - { - oldStartLine: 10, oldLineCount: 3, newStartLine: 11, newLineCount: 2, - heading: 'second hunk', - lines: [' 0003', '-0004', ' 0005'], - }, - ], - }]); - const hunks = fp.getHunks(); - - const wrapper = mount(buildApp({filePatch: fp})); + const {multiFilePatch: mfp} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('path.txt')); + fp.addHunk(h => { + h.oldRow(1); + h.unchanged('0000').added('0001').unchanged('0002'); + }); + fp.addHunk(h => { + h.oldRow(10); + h.unchanged('0003').deleted('0004').unchanged('0005'); + }); + }).build(); + + const hunks = mfp.getFilePatches()[0].getHunks(); + const wrapper = mount(buildApp({multiFilePatch: mfp})); + assert.isTrue(wrapper.find('HunkHeaderView').someWhere(h => h.prop('hunk') === hunks[0])); assert.isTrue(wrapper.find('HunkHeaderView').someWhere(h => h.prop('hunk') === hunks[1])); }); @@ -440,30 +543,23 @@ describe('FilePatchView', function() { }); it('handles mousedown as a selection event', function() { - const fp = buildFilePatch([{ - oldPath: 'path.txt', - oldMode: '100644', - newPath: 'path.txt', - newMode: '100644', - status: 'modified', - hunks: [ - { - oldStartLine: 1, oldLineCount: 2, newStartLine: 1, newLineCount: 3, - heading: 'first hunk', - lines: [' 0000', '+0001', ' 0002'], - }, - { - oldStartLine: 10, oldLineCount: 3, newStartLine: 11, newLineCount: 2, - heading: 'second hunk', - lines: [' 0003', '-0004', ' 0005'], - }, - ], - }]); + const {multiFilePatch: mfp} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('path.txt')); + fp.addHunk(h => { + h.oldRow(1); + h.unchanged('0000').added('0001').unchanged('0002'); + }); + fp.addHunk(h => { + h.oldRow(10); + h.unchanged('0003').deleted('0004').unchanged('0005'); + 
}); + }).build(); const selectedRowsChanged = sinon.spy(); - const wrapper = mount(buildApp({filePatch: fp, selectedRowsChanged, selectionMode: 'line'})); + const wrapper = mount(buildApp({multiFilePatch: mfp, selectedRowsChanged, selectionMode: 'line'})); - wrapper.find('HunkHeaderView').at(1).prop('mouseDown')({button: 0}, fp.getHunks()[1]); + wrapper.find('HunkHeaderView').at(1).prop('mouseDown')({button: 0}, mfp.getFilePatches()[0].getHunks()[1]); assert.sameMembers(Array.from(selectedRowsChanged.lastCall.args[0]), [4]); assert.strictEqual(selectedRowsChanged.lastCall.args[1], 'hunk'); @@ -549,8 +645,9 @@ describe('FilePatchView', function() { const decorations = layerWrapper.find('Decoration[type="line-number"][gutterName="diff-icons"]'); assert.isTrue(decorations.exists()); }; - assertLayerDecorated(filePatch.getAdditionLayer()); - assertLayerDecorated(filePatch.getDeletionLayer()); + + assertLayerDecorated(filePatches.getAdditionLayer()); + assertLayerDecorated(filePatches.getDeletionLayer()); atomEnv.config.set('github.showDiffIconGutter', false); wrapper.update(); @@ -751,32 +848,24 @@ describe('FilePatchView', function() { let linesPatch; beforeEach(function() { - linesPatch = buildFilePatch([{ - oldPath: 'file.txt', - oldMode: '100644', - newPath: 'file.txt', - newMode: '100644', - status: 'modified', - hunks: [ - { - oldStartLine: 1, oldLineCount: 3, newStartLine: 1, newLineCount: 6, - heading: 'first hunk', - lines: [' 0000', '+0001', '+0002', '-0003', '+0004', '+0005', ' 0006'], - }, - { - oldStartLine: 10, oldLineCount: 0, newStartLine: 13, newLineCount: 0, - heading: 'second hunk', - lines: [ - ' 0007', '-0008', '-0009', '-0010', ' 0011', '+0012', '+0013', '+0014', '-0015', ' 0016', - '\\ No newline at end of file', - ], - }, - ], - }]); + + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('path.txt')); + fp.addHunk(h => { + h.oldRow(1); + h.unchanged('0000').added('0001', 
'0002').deleted('0003').added('0004').added('0005').unchanged('0006'); + }); + fp.addHunk(h => { + h.oldRow(10); + h.unchanged('0007').deleted('0008', '0009', '0010').unchanged('0011').added('0012', '0013', '0014').deleted('0015').unchanged('0016').noNewline(); + }); + }).build(); + linesPatch = multiFilePatch; }); it('decorates added lines', function() { - const wrapper = mount(buildApp({filePatch: linesPatch})); + const wrapper = mount(buildApp({multiFilePatch: linesPatch})); const decorationSelector = 'Decoration[type="line"][className="github-FilePatchView-line--added"]'; const decoration = wrapper.find(decorationSelector); @@ -787,7 +876,7 @@ describe('FilePatchView', function() { }); it('decorates deleted lines', function() { - const wrapper = mount(buildApp({filePatch: linesPatch})); + const wrapper = mount(buildApp({multiFilePatch: linesPatch})); const decorationSelector = 'Decoration[type="line"][className="github-FilePatchView-line--deleted"]'; const decoration = wrapper.find(decorationSelector); @@ -798,7 +887,7 @@ describe('FilePatchView', function() { }); it('decorates the nonewline line', function() { - const wrapper = mount(buildApp({filePatch: linesPatch})); + const wrapper = mount(buildApp({multiFilePatch: linesPatch})); const decorationSelector = 'Decoration[type="line"][className="github-FilePatchView-line--nonewline"]'; const decoration = wrapper.find(decorationSelector); @@ -809,22 +898,60 @@ describe('FilePatchView', function() { }); }); - it('notifies a callback when the editor selection changes', function() { - const selectedRowsChanged = sinon.spy(); - const wrapper = mount(buildApp({selectedRowsChanged})); - const editor = wrapper.find('AtomTextEditor').instance().getModel(); + describe('editor selection change notification', function() { + let multiFilePatch; - selectedRowsChanged.resetHistory(); + beforeEach(function() { + multiFilePatch = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('0')); + fp.addHunk(h 
=> h.oldRow(1).unchanged('0').added('1', '2').deleted('3').unchanged('4')); + fp.addHunk(h => h.oldRow(10).unchanged('5').added('6', '7').deleted('8').unchanged('9')); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('1')); + fp.addHunk(h => h.oldRow(1).unchanged('10').added('11', '12').deleted('13').unchanged('14')); + fp.addHunk(h => h.oldRow(10).unchanged('15').added('16', '17').deleted('18').unchanged('19')); + }) + .build() + .multiFilePatch; + }); - editor.setSelectedBufferRange([[5, 1], [6, 2]]); + it('notifies a callback when the selected rows change', function() { + const selectedRowsChanged = sinon.spy(); + const wrapper = mount(buildApp({multiFilePatch, selectedRowsChanged})); + const editor = wrapper.find('AtomTextEditor').instance().getModel(); - assert.sameMembers(Array.from(selectedRowsChanged.lastCall.args[0]), [6]); - assert.strictEqual(selectedRowsChanged.lastCall.args[1], 'hunk'); + selectedRowsChanged.resetHistory(); + + editor.setSelectedBufferRange([[5, 1], [6, 2]]); + + assert.sameMembers(Array.from(selectedRowsChanged.lastCall.args[0]), [6]); + assert.strictEqual(selectedRowsChanged.lastCall.args[1], 'hunk'); + assert.isFalse(selectedRowsChanged.lastCall.args[2]); + }); + + it('notifies a callback when cursors span multiple files', function() { + const selectedRowsChanged = sinon.spy(); + const wrapper = mount(buildApp({multiFilePatch, selectedRowsChanged})); + const editor = wrapper.find('AtomTextEditor').instance().getModel(); + + selectedRowsChanged.resetHistory(); + editor.setSelectedBufferRanges([ + [[5, 0], [5, 0]], + [[16, 0], [16, 0]], + ]); + + assert.sameMembers(Array.from(selectedRowsChanged.lastCall.args[0]), [16]); + assert.strictEqual(selectedRowsChanged.lastCall.args[1], 'hunk'); + assert.isTrue(selectedRowsChanged.lastCall.args[2]); + }); }); describe('when viewing an empty patch', function() { it('renders an empty patch message', function() { - const wrapper = shallow(buildApp({filePatch: FilePatch.createNull()})); + 
const {multiFilePatch: emptyMfp} = multiFilePatchBuilder().build(); + const wrapper = shallow(buildApp({multiFilePatch: emptyMfp})); assert.isTrue(wrapper.find('.github-FilePatchView').hasClass('github-FilePatchView--blank')); assert.isTrue(wrapper.find('.github-FilePatchView-message').exists()); }); @@ -836,6 +963,114 @@ describe('FilePatchView', function() { }); describe('registers Atom commands', function() { + it('toggles all mode changes', function() { + function tenLineHunk(builder) { + builder.addHunk(h => { + for (let i = 0; i < 10; i++) { + h.added('xxxxx'); + } + }); + } + + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('f0')); + tenLineHunk(fp); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('f1')); + fp.setNewFile(f => f.path('f1').executable()); + tenLineHunk(fp); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('f2')); + tenLineHunk(fp); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('f3').executable()); + fp.setNewFile(f => f.path('f3')); + tenLineHunk(fp); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('f4').executable()); + tenLineHunk(fp); + }) + .build(); + const toggleModeChange = sinon.spy(); + const wrapper = mount(buildApp({toggleModeChange, multiFilePatch})); + + const editor = wrapper.find('AtomTextEditor').instance().getModel(); + editor.setSelectedBufferRanges([ + [[5, 0], [5, 2]], + [[37, 0], [42, 0]], + ]); + + atomEnv.commands.dispatch(wrapper.getDOMNode(), 'github:stage-file-mode-change'); + + const [fp0, fp1, fp2, fp3, fp4] = multiFilePatch.getFilePatches(); + + assert.isFalse(toggleModeChange.calledWith(fp0)); + assert.isFalse(toggleModeChange.calledWith(fp1)); + assert.isFalse(toggleModeChange.calledWith(fp2)); + assert.isTrue(toggleModeChange.calledWith(fp3)); + assert.isFalse(toggleModeChange.calledWith(fp4)); + }); + + it('toggles all symlink changes', function() { + function tenLineHunk(builder) { + builder.addHunk(h => { + for (let i = 
0; i < 10; i++) { + h.added('zzzzz'); + } + }); + } + + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(fp => { + fp.setOldFile(f => f.path('f0').symlinkTo('elsewhere')); + fp.setNewFile(f => f.path('f0')); + tenLineHunk(fp); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('f1')); + tenLineHunk(fp); + }) + .addFilePatch(fp => { + fp.setNewFile(f => f.path('f2')); + fp.setOldFile(f => f.path('f2').symlinkTo('somewhere')); + tenLineHunk(fp); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('f3').symlinkTo('unchanged')); + tenLineHunk(fp); + }) + .addFilePatch(fp => { + fp.setOldFile(f => f.path('f4').executable()); + tenLineHunk(fp); + }) + .build(); + + const toggleSymlinkChange = sinon.spy(); + const wrapper = mount(buildApp({toggleSymlinkChange, multiFilePatch})); + + const editor = wrapper.find('AtomTextEditor').instance().getModel(); + editor.setSelectedBufferRanges([ + [[0, 0], [2, 2]], + [[5, 1], [6, 2]], + [[37, 0], [37, 0]], + ]); + + atomEnv.commands.dispatch(wrapper.getDOMNode(), 'github:stage-symlink-change'); + + const [fp0, fp1, fp2, fp3, fp4] = multiFilePatch.getFilePatches(); + + assert.isTrue(toggleSymlinkChange.calledWith(fp0)); + assert.isFalse(toggleSymlinkChange.calledWith(fp1)); + assert.isFalse(toggleSymlinkChange.calledWith(fp2)); + assert.isFalse(toggleSymlinkChange.calledWith(fp3)); + assert.isFalse(toggleSymlinkChange.calledWith(fp4)); + }); + it('toggles the current selection', function() { const toggleRows = sinon.spy(); const wrapper = mount(buildApp({toggleRows})); @@ -922,55 +1157,47 @@ describe('FilePatchView', function() { }); it('surfaces focus to the git tab', function() { - const surfaceFile = sinon.spy(); - const wrapper = mount(buildApp({surfaceFile})); + const surface = sinon.spy(); + const wrapper = mount(buildApp({surface})); - atomEnv.commands.dispatch(wrapper.getDOMNode(), 'github:surface-file'); - assert.isTrue(surfaceFile.called); + atomEnv.commands.dispatch(wrapper.getDOMNode(), 
'github:surface'); + assert.isTrue(surface.called); }); describe('hunk mode navigation', function() { + let mfp; + beforeEach(function() { - filePatch = buildFilePatch([{ - oldPath: 'path.txt', - oldMode: '100644', - newPath: 'path.txt', - newMode: '100644', - status: 'modified', - hunks: [ - { - oldStartLine: 4, oldLineCount: 2, newStartLine: 4, newLineCount: 3, - heading: 'zero', - lines: [' 0000', '+0001', ' 0002'], - }, - { - oldStartLine: 10, oldLineCount: 3, newStartLine: 11, newLineCount: 2, - heading: 'one', - lines: [' 0003', '-0004', ' 0005'], - }, - { - oldStartLine: 20, oldLineCount: 2, newStartLine: 20, newLineCount: 3, - heading: 'two', - lines: [' 0006', '+0007', ' 0008'], - }, - { - oldStartLine: 30, oldLineCount: 2, newStartLine: 31, newLineCount: 3, - heading: 'three', - lines: [' 0009', '+0010', ' 0011'], - }, - { - oldStartLine: 40, oldLineCount: 4, newStartLine: 42, newLineCount: 2, - heading: 'four', - lines: [' 0012', '-0013', '-0014', ' 0015'], - }, - ], - }]); + const {multiFilePatch} = multiFilePatchBuilder().addFilePatch(fp => { + fp.setOldFile(f => f.path('path.txt')); + fp.addHunk(h => { + h.oldRow(4); + h.unchanged('0000').added('0001').unchanged('0002'); + }); + fp.addHunk(h => { + h.oldRow(10); + h.unchanged('0003').deleted('0004').unchanged('0005'); + }); + fp.addHunk(h => { + h.oldRow(20); + h.unchanged('0006').added('0007').unchanged('0008'); + }); + fp.addHunk(h => { + h.oldRow(30); + h.unchanged('0009').added('0010').unchanged('0011'); + }); + fp.addHunk(h => { + h.oldRow(40); + h.unchanged('0012').deleted('0013', '0014').unchanged('0015'); + }); + }).build(); + mfp = multiFilePatch; }); it('advances the selection to the next hunks', function() { const selectedRowsChanged = sinon.spy(); const selectedRows = new Set([1, 7, 10]); - const wrapper = mount(buildApp({filePatch, selectedRowsChanged, selectedRows, selectionMode: 'hunk'})); + const wrapper = mount(buildApp({multiFilePatch: mfp, selectedRowsChanged, selectedRows, 
selectionMode: 'hunk'})); const editor = wrapper.find('AtomTextEditor').instance().getModel(); editor.setSelectedBufferRanges([ [[0, 0], [2, 4]], // hunk 0 @@ -994,7 +1221,7 @@ describe('FilePatchView', function() { it('does not advance a selected hunk at the end of the patch', function() { const selectedRowsChanged = sinon.spy(); const selectedRows = new Set([4, 13, 14]); - const wrapper = mount(buildApp({filePatch, selectedRowsChanged, selectedRows, selectionMode: 'hunk'})); + const wrapper = mount(buildApp({multiFilePatch: mfp, selectedRowsChanged, selectedRows, selectionMode: 'hunk'})); const editor = wrapper.find('AtomTextEditor').instance().getModel(); editor.setSelectedBufferRanges([ [[3, 0], [5, 4]], // hunk 1 @@ -1016,7 +1243,7 @@ describe('FilePatchView', function() { it('retreats the selection to the previous hunks', function() { const selectedRowsChanged = sinon.spy(); const selectedRows = new Set([4, 10, 13, 14]); - const wrapper = mount(buildApp({filePatch, selectedRowsChanged, selectedRows, selectionMode: 'hunk'})); + const wrapper = mount(buildApp({multiFilePatch: mfp, selectedRowsChanged, selectedRows, selectionMode: 'hunk'})); const editor = wrapper.find('AtomTextEditor').instance().getModel(); editor.setSelectedBufferRanges([ [[3, 0], [5, 4]], // hunk 1 @@ -1040,7 +1267,7 @@ describe('FilePatchView', function() { it('does not retreat a selected hunk at the beginning of the patch', function() { const selectedRowsChanged = sinon.spy(); const selectedRows = new Set([4, 10, 13, 14]); - const wrapper = mount(buildApp({filePatch, selectedRowsChanged, selectedRows, selectionMode: 'hunk'})); + const wrapper = mount(buildApp({multiFilePatch: mfp, selectedRowsChanged, selectedRows, selectionMode: 'hunk'})); const editor = wrapper.find('AtomTextEditor').instance().getModel(); editor.setSelectedBufferRanges([ [[0, 0], [2, 4]], // hunk 0 @@ -1060,72 +1287,73 @@ describe('FilePatchView', function() { }); }); - describe('opening the file', function() { - let 
fp; + describe('jump to file', function() { + let mfp, fp; beforeEach(function() { - fp = buildFilePatch([{ - oldPath: 'path.txt', - oldMode: '100644', - newPath: 'path.txt', - newMode: '100644', - status: 'modified', - hunks: [ - { - oldStartLine: 2, oldLineCount: 2, newStartLine: 2, newLineCount: 3, - heading: 'first hunk', - // 2 3 4 - lines: [' 0000', '+0001', ' 0002'], - }, - { - oldStartLine: 10, oldLineCount: 5, newStartLine: 11, newLineCount: 6, - heading: 'second hunk', - // 11 12 13 14 15 16 - lines: [' 0003', '+0004', '+0005', '-0006', ' 0007', '+0008', '-0009', ' 0010'], - }, - ], - }]); + const {multiFilePatch} = multiFilePatchBuilder() + .addFilePatch(filePatch => { + filePatch.setOldFile(f => f.path('path.txt')); + filePatch.addHunk(h => { + h.oldRow(2); + h.unchanged('0000').added('0001').unchanged('0002'); + }); + filePatch.addHunk(h => { + h.oldRow(10); + h.unchanged('0003').added('0004', '0005').deleted('0006').unchanged('0007').added('0008').deleted('0009').unchanged('0010'); + }); + }) + .addFilePatch(filePatch => { + filePatch.setOldFile(f => f.path('other.txt')); + filePatch.addHunk(h => { + h.oldRow(10); + h.unchanged('0011').added('0012').unchanged('0013'); + }); + }) + .build(); + + mfp = multiFilePatch; + fp = mfp.getFilePatches()[0]; }); it('opens the file at the current unchanged row', function() { const openFile = sinon.spy(); - const wrapper = mount(buildApp({filePatch: fp, openFile})); + const wrapper = mount(buildApp({multiFilePatch: mfp, openFile})); const editor = wrapper.find('AtomTextEditor').instance().getModel(); editor.setCursorBufferPosition([7, 2]); - atomEnv.commands.dispatch(wrapper.getDOMNode(), 'github:open-file'); - - assert.isTrue(openFile.calledWith([[14, 2]])); + atomEnv.commands.dispatch(wrapper.getDOMNode(), 'github:jump-to-file'); + assert.isTrue(openFile.calledWith(fp, [[13, 2]], true)); }); it('opens the file at a current added row', function() { const openFile = sinon.spy(); - const wrapper = 
mount(buildApp({filePatch: fp, openFile})); + const wrapper = mount(buildApp({multiFilePatch: mfp, openFile})); const editor = wrapper.find('AtomTextEditor').instance().getModel(); editor.setCursorBufferPosition([8, 3]); - atomEnv.commands.dispatch(wrapper.getDOMNode(), 'github:open-file'); + atomEnv.commands.dispatch(wrapper.getDOMNode(), 'github:jump-to-file'); - assert.isTrue(openFile.calledWith([[15, 3]])); + assert.isTrue(openFile.calledWith(fp, [[14, 3]], true)); }); it('opens the file at the beginning of the previous added or unchanged row', function() { const openFile = sinon.spy(); - const wrapper = mount(buildApp({filePatch: fp, openFile})); + const wrapper = mount(buildApp({multiFilePatch: mfp, openFile})); const editor = wrapper.find('AtomTextEditor').instance().getModel(); editor.setCursorBufferPosition([9, 2]); - atomEnv.commands.dispatch(wrapper.getDOMNode(), 'github:open-file'); + atomEnv.commands.dispatch(wrapper.getDOMNode(), 'github:jump-to-file'); - assert.isTrue(openFile.calledWith([[15, 0]])); + assert.isTrue(openFile.calledWith(fp, [[15, 0]], true)); }); it('preserves multiple cursors', function() { const openFile = sinon.spy(); - const wrapper = mount(buildApp({filePatch: fp, openFile})); + const wrapper = mount(buildApp({multiFilePatch: mfp, openFile})); const editor = wrapper.find('AtomTextEditor').instance().getModel(); editor.setCursorBufferPosition([3, 2]); @@ -1136,14 +1364,69 @@ describe('FilePatchView', function() { // [9, 2] and [9, 3] should be collapsed into a single cursor at [15, 0] - atomEnv.commands.dispatch(wrapper.getDOMNode(), 'github:open-file'); + atomEnv.commands.dispatch(wrapper.getDOMNode(), 'github:jump-to-file'); - assert.isTrue(openFile.calledWith([ + assert.isTrue(openFile.calledWith(fp, [ + [10, 2], [11, 2], - [12, 2], - [3, 3], + [2, 3], [15, 0], - ])); + ], true)); + }); + + it('opens non-pending editors when opening multiple', function() { + const openFile = sinon.spy(); + const wrapper = 
mount(buildApp({multiFilePatch: mfp, openFile})); + + const editor = wrapper.find('AtomTextEditor').instance().getModel(); + editor.setSelectedBufferRanges([ + [[4, 0], [4, 0]], + [[12, 0], [12, 0]], + ]); + + atomEnv.commands.dispatch(wrapper.getDOMNode(), 'github:jump-to-file'); + + assert.isTrue(openFile.calledWith(mfp.getFilePatches()[0], [[11, 0]], false)); + assert.isTrue(openFile.calledWith(mfp.getFilePatches()[1], [[10, 0]], false)); + }); + + describe('didOpenFile(selectedFilePatch)', function() { + describe('when there is a selection in the selectedFilePatch', function() { + it('opens the file and places the cursor corresponding to the selection', function() { + const openFile = sinon.spy(); + const wrapper = mount(buildApp({multiFilePatch: mfp, openFile})); + + const editor = wrapper.find('AtomTextEditor').instance().getModel(); + editor.setSelectedBufferRanges([ + [[4, 0], [4, 0]], // cursor in first file patch + ]); + + // click button for first file + wrapper.find('.github-FilePatchHeaderView-jumpToFileButton').at(0).simulate('click'); + + assert.isTrue(openFile.calledWith(mfp.getFilePatches()[0], [[11, 0]], true)); + }); + }); + + describe('when there are no selections in the selectedFilePatch', function() { + it('opens the file and places the cursor at the beginning of the first hunk', function() { + const openFile = sinon.spy(); + const wrapper = mount(buildApp({multiFilePatch: mfp, openFile})); + + const editor = wrapper.find('AtomTextEditor').instance().getModel(); + editor.setSelectedBufferRanges([ + [[4, 0], [4, 0]], // cursor in first file patch + ]); + + // click button for second file + wrapper.find('.github-FilePatchHeaderView-jumpToFileButton').at(1).simulate('click'); + + const secondFilePatch = mfp.getFilePatches()[1]; + const firstHunkBufferRow = secondFilePatch.getHunks()[0].getNewStartRow() - 1; + + assert.isTrue(openFile.calledWith(secondFilePatch, [[firstHunkBufferRow, 0]], true)); + }); + }); }); }); }); diff --git 
a/test/views/staging-view.test.js b/test/views/staging-view.test.js index 8c9fb955031..3fb74bea3df 100644 --- a/test/views/staging-view.test.js +++ b/test/views/staging-view.test.js @@ -224,12 +224,12 @@ describe('StagingView', function() { it('passes activation options and focuses the returned item if activate is true', async function() { const wrapper = mount(app); - const filePatchItem = { - getElement: () => filePatchItem, - querySelector: () => filePatchItem, + const changedFileItem = { + getElement: () => changedFileItem, + querySelector: () => changedFileItem, focus: sinon.spy(), }; - workspace.open.returns(filePatchItem); + workspace.open.returns(changedFileItem); await wrapper.instance().showFilePatchItem('file.txt', 'staged', {activate: true}); @@ -238,15 +238,15 @@ describe('StagingView', function() { `atom-github://file-patch/file.txt?workdir=${encodeURIComponent(workingDirectoryPath)}&stagingStatus=staged`, {pending: true, activatePane: true, pane: undefined, activateItem: true}, ]); - assert.isTrue(filePatchItem.focus.called); + assert.isTrue(changedFileItem.focus.called); }); it('makes the item visible if activate is false', async function() { const wrapper = mount(app); const focus = sinon.spy(); - const filePatchItem = {focus}; - workspace.open.returns(filePatchItem); + const changedFileItem = {focus}; + workspace.open.returns(changedFileItem); const activateItem = sinon.spy(); workspace.paneForItem.returns({activateItem}); @@ -259,7 +259,7 @@ describe('StagingView', function() { ]); assert.isFalse(focus.called); assert.equal(activateItem.callCount, 1); - assert.equal(activateItem.args[0][0], filePatchItem); + assert.equal(activateItem.args[0][0], changedFileItem); }); }); }); diff --git a/test/watch-workspace-item.test.js b/test/watch-workspace-item.test.js new file mode 100644 index 00000000000..0d63a4c95d8 --- /dev/null +++ b/test/watch-workspace-item.test.js @@ -0,0 +1,181 @@ +import {watchWorkspaceItem} from '../lib/watch-workspace-item'; 
+import URIPattern from '../lib/atom/uri-pattern'; + +describe('watchWorkspaceItem', function() { + let sub, atomEnv, workspace, component; + + beforeEach(function() { + atomEnv = global.buildAtomEnvironment(); + workspace = atomEnv.workspace; + + component = { + state: {}, + setState: sinon.stub().callsFake((updater, cb) => cb && cb()), + }; + + workspace.addOpener(uri => { + if (uri.startsWith('atom-github://')) { + return { + getURI() { return uri; }, + + getElement() { + if (!this.element) { + this.element = document.createElement('div'); + } + return this.element; + }, + }; + } else { + return undefined; + } + }); + }); + + afterEach(function() { + sub && sub.dispose(); + atomEnv.destroy(); + }); + + describe('initial state', function() { + it('creates component state if none is present', function() { + component.state = undefined; + + sub = watchWorkspaceItem(workspace, 'atom-github://item', component, 'aKey'); + assert.deepEqual(component.state, {aKey: false}); + }); + + it('is false when the pane is not open', async function() { + await workspace.open('atom-github://nonmatching'); + + sub = watchWorkspaceItem(workspace, 'atom-github://item', component, 'someKey'); + assert.isFalse(component.state.someKey); + }); + + it('is false when the pane is open but not active', async function() { + await workspace.open('atom-github://item/one'); + await workspace.open('atom-github://item/two'); + + sub = watchWorkspaceItem(workspace, 'atom-github://item/one', component, 'theKey'); + assert.isFalse(component.state.theKey); + }); + + it('is true when the pane is already open and active', async function() { + await workspace.open('atom-github://item/two'); + await workspace.open('atom-github://item/one'); + + sub = watchWorkspaceItem(workspace, 'atom-github://item/one', component, 'theKey'); + assert.isTrue(component.state.theKey); + }); + + it('is true when the pane is open and active in any pane', async function() { + await workspace.open('atom-github://some-item', 
{location: 'right'}); + await workspace.open('atom-github://nonmatching'); + + assert.strictEqual(workspace.getRightDock().getActivePaneItem().getURI(), 'atom-github://some-item'); + assert.strictEqual(workspace.getActivePaneItem().getURI(), 'atom-github://nonmatching'); + + sub = watchWorkspaceItem(workspace, 'atom-github://some-item', component, 'someKey'); + assert.isTrue(component.state.someKey); + }); + + it('accepts a preconstructed URIPattern', async function() { + await workspace.open('atom-github://item/one'); + const u = new URIPattern('atom-github://item/{pattern}'); + + sub = watchWorkspaceItem(workspace, u, component, 'theKey'); + assert.isTrue(component.state.theKey); + }); + }); + + describe('workspace events', function() { + it('becomes true when the pane is opened', async function() { + sub = watchWorkspaceItem(workspace, 'atom-github://item/{pattern}', component, 'theKey'); + assert.isFalse(component.state.theKey); + + await workspace.open('atom-github://item/match'); + + assert.isTrue(component.setState.calledWith({theKey: true})); + }); + + it('remains true if another matching pane is opened', async function() { + await workspace.open('atom-github://item/match0'); + sub = watchWorkspaceItem(workspace, 'atom-github://item/{pattern}', component, 'theKey'); + assert.isTrue(component.state.theKey); + + await workspace.open('atom-github://item/match1'); + assert.isFalse(component.setState.called); + }); + + it('becomes false if a nonmatching pane is opened', async function() { + await workspace.open('atom-github://item/match0'); + sub = watchWorkspaceItem(workspace, 'atom-github://item/{pattern}', component, 'theKey'); + assert.isTrue(component.state.theKey); + + await workspace.open('atom-github://other-item/match1'); + assert.isTrue(component.setState.calledWith({theKey: false})); + }); + + it('becomes false if the last matching pane is closed', async function() { + await workspace.open('atom-github://item/match0'); + await 
workspace.open('atom-github://item/match1'); + + sub = watchWorkspaceItem(workspace, 'atom-github://item/{pattern}', component, 'theKey'); + assert.isTrue(component.state.theKey); + + assert.isTrue(workspace.hide('atom-github://item/match1')); + assert.isFalse(component.setState.called); + + assert.isTrue(workspace.hide('atom-github://item/match0')); + assert.isTrue(component.setState.calledWith({theKey: false})); + }); + }); + + it('stops updating when disposed', async function() { + sub = watchWorkspaceItem(workspace, 'atom-github://item', component, 'theKey'); + assert.isFalse(component.state.theKey); + + sub.dispose(); + await workspace.open('atom-github://item'); + assert.isFalse(component.setState.called); + + await workspace.hide('atom-github://item'); + assert.isFalse(component.setState.called); + }); + + describe('setPattern', function() { + it('immediately updates the state based on the new pattern', async function() { + sub = watchWorkspaceItem(workspace, 'atom-github://item0/{pattern}', component, 'theKey'); + assert.isFalse(component.state.theKey); + + await workspace.open('atom-github://item1/match'); + assert.isFalse(component.setState.called); + + await sub.setPattern('atom-github://item1/{pattern}'); + assert.isFalse(component.state.theKey); + assert.isTrue(component.setState.calledWith({theKey: true})); + }); + + it('uses the new pattern to keep state up to date', async function() { + sub = watchWorkspaceItem(workspace, 'atom-github://item0/{pattern}', component, 'theKey'); + await sub.setPattern('atom-github://item1/{pattern}'); + + await workspace.open('atom-github://item0/match'); + assert.isFalse(component.setState.called); + + await workspace.open('atom-github://item1/match'); + assert.isTrue(component.setState.calledWith({theKey: true})); + }); + + it('accepts a preconstructed URIPattern', async function() { + sub = watchWorkspaceItem(workspace, 'atom-github://item0/{pattern}', component, 'theKey'); + assert.isFalse(component.state.theKey); 
+ + await workspace.open('atom-github://item1/match'); + assert.isFalse(component.setState.called); + + await sub.setPattern(new URIPattern('atom-github://item1/{pattern}')); + assert.isFalse(component.state.theKey); + assert.isTrue(component.setState.calledWith({theKey: true})); + }); + }); +});