feat: added new flag for serial process #166

Merged (3 commits) on Jul 26, 2021
Changes from 2 commits
241 changes: 100 additions & 141 deletions packages/plugin-data/command-snapshot.json
@@ -1,143 +1,102 @@
[
{
"command": "force:data:bulk:delete",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"csvfile",
"json",
"loglevel",
"sobjecttype",
"targetusername",
"wait"
]
},
{
"command": "force:data:bulk:status",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"batchid",
"jobid",
"json",
"loglevel",
"targetusername"
]
},
{
"command": "force:data:bulk:upsert",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"csvfile",
"externalid",
"json",
"loglevel",
"sobjecttype",
"targetusername",
"wait"
]
},
{
"command": "force:data:record:create",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"json",
"loglevel",
"perflog",
"sobjecttype",
"targetusername",
"usetoolingapi",
"values"
]
},
{
"command": "force:data:record:delete",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"json",
"loglevel",
"perflog",
"sobjectid",
"sobjecttype",
"targetusername",
"usetoolingapi",
"where"
]
},
{
"command": "force:data:record:get",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"json",
"loglevel",
"perflog",
"sobjectid",
"sobjecttype",
"targetusername",
"usetoolingapi",
"where"
]
},
{
"command": "force:data:record:update",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"json",
"loglevel",
"perflog",
"sobjectid",
"sobjecttype",
"targetusername",
"usetoolingapi",
"values",
"where"
]
},
{
"command": "force:data:soql:query",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"json",
"loglevel",
"query",
"perflog",
"resultformat",
"targetusername",
"usetoolingapi"
]
},
{
"command": "force:data:tree:export",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"json",
"loglevel",
"outputdir",
"plan",
"prefix",
"query",
"targetusername"
]
},
{
"command": "force:data:tree:import",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"confighelp",
"contenttype",
"json",
"loglevel",
"plan",
"sobjecttreefiles",
"targetusername"
]
}
{
"command": "force:data:bulk:delete",
"plugin": "@salesforce/plugin-data",
"flags": ["apiversion", "csvfile", "json", "loglevel", "sobjecttype", "targetusername", "wait"]
},
{
"command": "force:data:bulk:status",
"plugin": "@salesforce/plugin-data",
"flags": ["apiversion", "batchid", "jobid", "json", "loglevel", "targetusername"]
},
{
"command": "force:data:bulk:upsert",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"csvfile",
"externalid",
"json",
"loglevel",
"serial",
"sobjecttype",
"targetusername",
"wait"
]
},
{
"command": "force:data:record:create",
"plugin": "@salesforce/plugin-data",
"flags": ["apiversion", "json", "loglevel", "perflog", "sobjecttype", "targetusername", "usetoolingapi", "values"]
},
{
"command": "force:data:record:delete",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"json",
"loglevel",
"perflog",
"sobjectid",
"sobjecttype",
"targetusername",
"usetoolingapi",
"where"
]
},
{
"command": "force:data:record:get",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"json",
"loglevel",
"perflog",
"sobjectid",
"sobjecttype",
"targetusername",
"usetoolingapi",
"where"
]
},
{
"command": "force:data:record:update",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"json",
"loglevel",
"perflog",
"sobjectid",
"sobjecttype",
"targetusername",
"usetoolingapi",
"values",
"where"
]
},
{
"command": "force:data:soql:query",
"plugin": "@salesforce/plugin-data",
"flags": ["apiversion", "json", "loglevel", "perflog", "query", "resultformat", "targetusername", "usetoolingapi"]
},
{
"command": "force:data:tree:export",
"plugin": "@salesforce/plugin-data",
"flags": ["apiversion", "json", "loglevel", "outputdir", "plan", "prefix", "query", "targetusername"]
},
{
"command": "force:data:tree:import",
"plugin": "@salesforce/plugin-data",
"flags": [
"apiversion",
"confighelp",
"contenttype",
"json",
"loglevel",
"plan",
"sobjecttreefiles",
"targetusername"
]
}
]
5 changes: 3 additions & 2 deletions packages/plugin-data/messages/bulk.upsert.json
@@ -1,5 +1,5 @@
{
"description": "bulk upsert records from a CSV file\nInserts or updates records from a CSV file.\n\nOne job can contain many batches, depending on the length of the CSV file.\nReturns a job ID and a batch ID. Use these IDs to check job status with data:bulk:status.\n\nFor information about formatting your CSV file, see \"Prepare CSV Files\" in the Bulk API Developer Guide. \n\n",
"description": "bulk upsert records from a CSV file\nInserts or updates records from a CSV file.\n\nOne job can contain many batches, depending on the length of the CSV file.\nReturns a job ID and a batch ID. Use these IDs to check job status with data:bulk:status.\n\nFor information about formatting your CSV file, see \"Prepare CSV Files\" in the Bulk API Developer Guide. \n\nBy default, the job runs the batches in parallel. Specify --serial to run them serially.",
"examples": [
"sfdx force:data:bulk:upsert -s MyObject__c -f ./path/to/file.csv -i MyField__c",
"sfdx force:data:bulk:upsert -s MyObject__c -f ./path/to/file.csv -i Id -w 2"
@@ -8,6 +8,7 @@
"sobjecttype": "the sObject type of the records you want to upsert",
"csvfile": "the path to the CSV file that defines the records to upsert",
"externalid": "the column name of the external ID",
"wait": "the number of minutes to wait for the command to complete before displaying the results"
"wait": "the number of minutes to wait for the command to complete before displaying the results",
"serial": "run batches in serial mode"
}
}
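With the new flag documented, a serial run can be requested directly from the CLI. The invocation below is illustrative only and reuses the placeholder object, file, and field names from the examples above; the short form -r corresponds to the char declared for the flag in the command source further down:

sfdx force:data:bulk:upsert -s MyObject__c -f ./path/to/file.csv -i MyField__c --serial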
8 changes: 7 additions & 1 deletion packages/plugin-data/src/commands/force/data/bulk/upsert.ts
@@ -40,6 +40,11 @@ export default class Upsert extends DataCommand {
description: messages.getMessage('flags.wait'),
min: 0,
}),
serial: flags.boolean({
char: 'r',
description: messages.getMessage('flags.serial'),
default: false,
Review comment (Contributor): you can leave off default: false since that's the automatic value for booleans when they aren't there.
}),
};

public async run(): Promise<JobInfo[] | BulkResult[]> {
@@ -51,9 +56,10 @@ export default class Upsert extends DataCommand {
const batcher: Batcher = new Batcher(conn, this.ux);
const csvStream: ReadStream = fs.createReadStream(this.flags.csvfile, { encoding: 'utf-8' });

const concurrencyMode = this.flags.serial ? 'Serial' : 'Parallel';
const job = conn.bulk.createJob(this.flags.sobjecttype, 'upsert', {
extIdField: this.flags.externalid as string,
concurrencyMode: 'Parallel',
concurrencyMode,
});

// eslint-disable-next-line @typescript-eslint/no-misused-promises,no-async-promise-executor
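Outside the diff, the whole change reduces to a boolean flag feeding the Bulk API job's concurrencyMode. The sketch below is a minimal restatement, not the PR code: it assumes the flags import from @salesforce/command used elsewhere in the plugin, drops default: false per the review comment above, uses a literal description string instead of messages.getMessage, and introduces a hypothetical concurrencyModeFor helper for clarity.

import { flags } from '@salesforce/command';

// No explicit default needed: the boolean falls back to false when the flag is absent
// (per the review comment above).
const serial = flags.boolean({
  char: 'r',
  description: 'run batches in serial mode',
});

// Hypothetical helper: map the flag value to the Bulk API concurrency mode for the job.
const concurrencyModeFor = (serialRequested: boolean | undefined): 'Serial' | 'Parallel' =>
  serialRequested ? 'Serial' : 'Parallel';

// Used the same way the diff wires it into the job options, e.g.
// conn.bulk.createJob(sobjecttype, 'upsert', { extIdField, concurrencyMode: concurrencyModeFor(flags.serial) })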
53 changes: 53 additions & 0 deletions packages/plugin-data/test/commands/force/data/bulk/upsert.test.ts
@@ -136,4 +136,57 @@ describe('force:data:bulk:upsert', () => {
expect(result.exitCode).to.equal(1);
expect(result.message).to.equal('Error');
});

const expectedSerialJob = {
id: '7503F000004rVEMQA2',
operation: 'upsert',
object: 'custom__c',
createdById: '0053F000007vESSQA2',
createdDate: '2021-01-19T23:05:32.000Z',
systemModstamp: '2021-01-19T23:05:33.000Z',
state: 'Closed',
externalIdFieldName: 'field__c',
concurrencyMode: 'Serial',
contentType: 'CSV',
numberBatchesQueued: '0',
numberBatchesInProgress: '0',
numberBatchesCompleted: '1',
numberBatchesFailed: '0',
numberBatchesTotal: '1',
numberRecordsProcessed: '18',
numberRetries: '0',
apiVersion: '50.0',
numberRecordsFailed: '0',
totalProcessingTime: '80',
apiActiveProcessingTime: '46',
apexProcessingTime: '0',
};

test
.withOrg({ username: 'test@org.com' }, true)
.do(() => {
stubMethod($$.SANDBOX, fs, 'fileExists').resolves(true);
stubMethod($$.SANDBOX, fs, 'createReadStream').returns(ReadStream.prototype);
stubMethod($$.SANDBOX, Batcher.prototype, 'createAndExecuteBatches').resolves(expectedSerialJob);
})
.stdout()
.command([
'force:data:bulk:upsert',
'--targetusername',
'test@org.com',
'--sobjecttype',
'custom__c',
'--csvfile',
'fileToUpsert.csv',
'--externalid',
'field__c',
'--wait',
'5',
'--json',
'--serial',
])
.it('should upsert the data correctly while waiting', (ctx) => {
const result = JSON.parse(ctx.stdout) as never;
expect(result).to.deep.equal({ status: 0, result: expectedSerialJob });
});
});