Bulk Delete Data from Salesforce
Of course, it goes without saying that this code is absolutely lethal in the hands of an undisciplined trigger finger, so never install it in a production org!
However, it can be useful for purging data from sandboxes in cases where, for whatever reason, you don't want to refresh the sandbox.
For example, you have a lot of test data, or data has been inserted via API calls, and you want to start afresh.
It's quite simple really: just follow these steps and the process should take care of most of the heavy lifting for you.
First, create the Apex classes below.
/**
 * Batch job that deletes every record of a single SObject type and
 * immediately hard-deletes them from the Recycle Bin so storage is
 * actually reclaimed. Database.Stateful keeps objectName available
 * across batch transactions for logging in finish().
 */
global class DeleteAllDataBatch implements Database.Batchable<SObject>, Database.Stateful {
    // API name of the object being purged (e.g. 'Account').
    private String objectName;

    /**
     * @param objectName API name of the SObject whose records will be deleted.
     */
    public DeleteAllDataBatch(String objectName) {
        this.objectName = objectName;
    }

    global Database.QueryLocator start(Database.BatchableContext BC) {
        // NOTE: the ALL ROWS keyword is NOT supported in dynamic SOQL /
        // Database.getQueryLocator — including it throws a QueryException
        // ("unexpected token: ALL") and kills the job in start(). Query
        // active rows only; execute() hard-deletes them via emptyRecycleBin.
        // escapeSingleQuotes guards the concatenated name against SOQL injection.
        String query = 'SELECT Id FROM ' + String.escapeSingleQuotes(objectName);
        return Database.getQueryLocator(query);
    }

    global void execute(Database.BatchableContext BC, List<SObject> scope) {
        try {
            delete scope; // Soft delete: records move to the Recycle Bin
            // Hard-delete right away so storage usage drops immediately
            // instead of waiting ~15 days for the Recycle Bin to empty.
            Database.emptyRecycleBin(scope);
            System.debug('Deleted ' + scope.size() + ' records from ' + objectName);
        } catch (Exception e) {
            // Best-effort purge: log the failure and let the remaining
            // batches continue rather than aborting the whole job.
            System.debug('Error deleting records from ' + objectName + ': ' + e.getMessage());
        }
    }

    global void finish(Database.BatchableContext BC) {
        System.debug('Finished batch job for object: ' + objectName);
    }
}
/**
 * Queues one DeleteAllDataBatch job (batch size 200) for each object in
 * the hard-coded list below. Edit the list to match your org's objects.
 */
public class DeleteAllDataBatchRunner {
    public static void runBatchJobs() {
        // Children listed before parents so lookup/master-detail
        // relationships don't block deletion.
        List<String> targets = new List<String>{
            'Task',
            'Event',
            'Case',
            'Opportunity',
            'Contact',
            'Account',
            'CustomObject__c' // Add all relevant objects
        };
        for (Integer i = 0; i < targets.size(); i++) {
            String target = targets[i];
            System.debug('Starting batch for: ' + target);
            Database.executeBatch(new DeleteAllDataBatch(target), 200);
        }
    }
}
Note: you can add any custom objects used in your org to the list in this class.
/**
 * Discovers every deletable object in the org via the describe API and
 * queues a DeleteAllDataBatch job for each, capped so the Apex flex
 * queue (max 100 holding jobs) does not overflow.
 */
public class DynamicDeleteAllDataBatchRunner {
    public static void runBatchJobs() {
        Integer maxJobs = 90; // Limit to avoid flex queue overflow
        Integer currentJobCount = [SELECT COUNT() FROM AsyncApexJob WHERE JobType = 'BatchApex' AND Status IN ('Queued', 'Processing')];
        for (Schema.SObjectType obj : Schema.getGlobalDescribe().values()) {
            if (currentJobCount >= maxJobs) {
                // Stop once the cap is reached instead of needlessly
                // describing the (potentially hundreds of) remaining objects.
                break;
            }
            Schema.DescribeSObjectResult describe = obj.getDescribe();
            // isQueryable() filters out system/setup objects that report
            // isDeletable() but whose dynamic SOQL would fail in the
            // batch's start() method.
            if (describe.isQueryable() && describe.isDeletable()) {
                System.debug('Starting batch for: ' + describe.getName());
                Database.executeBatch(new DeleteAllDataBatch(describe.getName()), 200);
                currentJobCount++;
            }
        }
        System.debug('Jobs queued. Total jobs in progress: ' + currentJobCount);
    }
}
Then simply open the Developer Console in Salesforce.
Click your avatar (top-right corner) → Developer Console.
Navigate to the Execute Anonymous Window:
Press Ctrl + E (or Cmd + E on Mac).
Paste the following code into the Execute Anonymous Window:
DynamicDeleteAllDataBatchRunner.runBatchJobs();
Click Execute to run the script.
Then simply monitor the progress of your job from the "Apex Jobs" page in Setup (processing jobs)
And see your data usage fall in the "Storage Usage" page in Setup
🗑️👏