Integrating with Salesforce using Batch Apex
This guide walks you through setting up a reliable batch integration between your system and Salesforce. It covers authentication, data extraction, transformations, and loading using Salesforce Batch Apex.
Prerequisites
- Salesforce org with API access enabled.
- Connected App with OAuth 2.0 client credentials.
- Node.js (v14+) or Python (3.8+) for the external script.
- Access to the target object(s) you intend to process.
Step 1 – Create a Connected App
- Navigate to Setup → App Manager → New Connected App.
- In the API (Enable OAuth Settings) section, check “Enable OAuth Settings”.
- Add the following OAuth scopes:
- Access and manage your data (api)
- Perform requests on your behalf at any time (refresh_token, offline_access)
- Set the callback URL to https://yourdomain.com/oauth/callback (a placeholder URL is acceptable for server-to-server integrations, since the client credentials flow never redirects a browser).
- Save and record the Consumer Key and Consumer Secret.
Step 2 – Obtain an Access Token
# Request an access token using the OAuth 2.0 Client Credentials flow.
# NOTE(review): this flow must be explicitly enabled on the Connected App
# ("Enable Client Credentials Flow") with a designated run-as user — confirm
# against your org's Connected App settings. Use https://test.salesforce.com
# (or your My Domain login URL) instead of login.salesforce.com for sandboxes.
curl -X POST https://login.salesforce.com/services/oauth2/token \
-d "grant_type=client_credentials" \
-d "client_id=YOUR_CONSUMER_KEY" \
-d "client_secret=YOUR_CONSUMER_SECRET"
Step 3 – Build the Batch Apex Class
/**
 * Batch job that builds an integration payload for each flagged
 * Custom_Object__c record and clears Needs_Integration__c so the record
 * is not reprocessed on the next run.
 *
 * Implements Database.Stateful so the failure count survives across
 * execute() chunks and can be reported in finish().
 */
global class IntegrationBatch implements Database.Batchable<SObject>, Database.Stateful {

    // Number of records that failed to save, accumulated across all chunks.
    global Integer failedRecords = 0;

    global Database.QueryLocator start(Database.BatchableContext bc) {
        // QueryLocator supports up to 50M records, far beyond a plain SOQL list.
        return Database.getQueryLocator(
            'SELECT Id, Name, Custom_Field__c FROM Custom_Object__c WHERE Needs_Integration__c = true'
        );
    }

    global void execute(Database.BatchableContext bc, List<SObject> scope) {
        List<Custom_Object__c> records = (List<Custom_Object__c>) scope;
        for (Custom_Object__c rec : records) {
            // Guard against a null Custom_Field__c: naive concatenation would
            // embed the literal string 'null' in the payload.
            String fieldValue = rec.Custom_Field__c == null ? '' : String.valueOf(rec.Custom_Field__c);
            rec.Integration_Payload__c = rec.Name + '|' + fieldValue;
            rec.Needs_Integration__c = false;
        }
        // allOrNone=false so one bad record (validation rule, trigger error)
        // does not roll back the entire chunk; failures stay flagged for retry.
        for (Database.SaveResult sr : Database.update(records, false)) {
            if (!sr.isSuccess()) {
                failedRecords++;
            }
        }
    }

    global void finish(Database.BatchableContext bc) {
        // Optional: call an external web service or send a platform event.
        // failedRecords is available here for alerting (email, custom log object).
    }
}
Step 4 – Schedule the Batch
/**
 * Schedulable entry point that launches IntegrationBatch.
 * Wire it up via Setup → Apex Classes → Schedule Apex, or System.schedule().
 */
global class IntegrationBatchScheduler implements Schedulable {

    // Records handed to each execute() chunk (200 is the platform default and maximum scope here).
    private static final Integer BATCH_SIZE = 200;

    global void execute(SchedulableContext sc) {
        Database.executeBatch(new IntegrationBatch(), BATCH_SIZE);
    }
}
Schedule with the UI (Setup → Apex Classes → Schedule Apex) or by running anonymous Apex (note that Salesforce cron expressions include a leading seconds field, unlike Unix cron):
System.schedule('Nightly Integration', '0 0 0 * * ?', new IntegrationBatchScheduler());
Step 5 – Consuming the Payload Externally
Node.js Example:
// Pull the integration payloads written by the IntegrationBatch Apex job
// and hand each one off to the external system.
const fetch = require('node-fetch');

const token = 'YOUR_ACCESS_TOKEN';
const baseUrl = 'https://yourInstance.salesforce.com'; // replace with your My Domain host
const query = 'SELECT Id, Integration_Payload__c FROM Custom_Object__c WHERE Integration_Payload__c != NULL';

async function fetchPayloads() {
  let url = `${baseUrl}/services/data/v57.0/query?q=${encodeURIComponent(query)}`;
  // Salesforce pages query results: follow nextRecordsUrl until done=true,
  // otherwise anything past the first batch (~2000 records) is silently lost.
  while (url) {
    const res = await fetch(url, {
      headers: { Authorization: `Bearer ${token}` }
    });
    if (!res.ok) {
      // Surface auth/query errors instead of attempting to parse an error body as records.
      throw new Error(`Salesforce query failed: ${res.status} ${res.statusText}`);
    }
    const data = await res.json();
    for (const r of data.records) {
      console.log('Payload:', r.Integration_Payload__c);
      // Send to external system here
    }
    url = data.done ? null : `${baseUrl}${data.nextRecordsUrl}`;
  }
}

fetchPayloads().catch(console.error);
Python Example:
# Pull the integration payloads written by the IntegrationBatch Apex job
# and forward each one to the external system.
import requests

access_token = 'YOUR_ACCESS_TOKEN'
instance_url = 'https://yourInstance.salesforce.com'  # replace with your My Domain host
query = "SELECT Id, Integration_Payload__c FROM Custom_Object__c WHERE Integration_Payload__c != NULL"

headers = {
    'Authorization': f'Bearer {access_token}',
    'Content-Type': 'application/json'
}

# First page of results; Salesforce pages query responses, so subsequent
# pages must be fetched via nextRecordsUrl until done is true — otherwise
# anything past the first batch (~2000 records) is silently dropped.
resp = requests.get(f"{instance_url}/services/data/v57.0/query",
                    headers=headers, params={'q': query}, timeout=30)
resp.raise_for_status()
payload = resp.json()

while True:
    for rec in payload['records']:
        print('Payload:', rec['Integration_Payload__c'])
        # Forward to external endpoint here
    if payload.get('done', True):
        break
    resp = requests.get(f"{instance_url}{payload['nextRecordsUrl']}",
                        headers=headers, timeout=30)
    resp.raise_for_status()
    payload = resp.json()