jasmin05

Batchable instance is too big

I created an Apex batch class.
When I try to import a CSV file with over 50,000 rows, the following error appears:
"Batchable instance is too big"
I don't know how to resolve this issue...
The following is my batch code.


global with sharing class NSRCOR_MER_Import_Batch implements Database.Batchable<String>, Database.Stateful{
    private String m_csvFile;
    private String m_batchLogId;
    private Boolean isHeader = true;
    // Instance fields, not static: Database.Stateful only preserves instance
    // state between execute() calls; static fields are reset each transaction.
    private Integer rowCount;
    private Integer importCount = 0;
    private Boolean success = true;
    private String remarks = '';
  
    global NSRCOR_MER_Import_Batch(String csfFile, String batchLogId){
        m_csvFile = csfFile;
        m_batchLogId = batchLogId;
    }
    global Iterable<String> start(Database.BatchableContext batchableContext){
        return new NSRCOR_MER_CSVIterator(m_csvFile, '\n');
    }
    global void execute(Database.BatchableContext batchableContext, List<String> scope){
        List<NSRCOR_MER_PO__c> importPO = new List<NSRCOR_MER_PO__c>();
        List<NSRCOR_MER_PO__c> updatePO = new List<NSRCOR_MER_PO__c>();
        Integer lineNo = 0;
        Integer countInsert = 0;
        Integer countUpdate = 0;
        if (rowCount == null) { rowCount = 0; }
    
        // Build a map of existing PO records keyed by Name. Note that this
        // unfiltered query runs in every execute() call and its rows count
        // toward the 50,000-row SOQL retrieval limit per transaction.
        Map<String,NSRCOR_MER_PO__c> poMap = new Map<String,NSRCOR_MER_PO__c>();
        for (NSRCOR_MER_PO__c rt : [SELECT Id, Name, PO_Title__c, Vendor_Name__c FROM NSRCOR_MER_PO__c]){
            poMap.put(rt.Name, rt);
        }
        NSRCOR_MER_Import_Log__c importLog = [SELECT Nubmer_of_Rows__c FROM NSRCOR_MER_Import_Log__c WHERE Id = :m_batchLogId];
        if (importLog.Nubmer_of_Rows__c == null){
            rowCount = 0;
        } else {
            rowCount = Integer.valueOf(importLog.Nubmer_of_Rows__c);
        }
      
        Map<Integer,Integer> toInsertRownum = new Map<Integer,Integer>();
        Map<Integer,Integer> toUpdateRownum = new Map<Integer,Integer>();
        Map<Integer,String> poMapInsert= new Map<Integer,String>();
        Map<Integer,String> poMapUpdate = new Map<Integer,String>();
 
        for (String row : scope){
            if (isHeader){
                isHeader = false;
            } else {
                List<String> csvValues = row.split(',');
                // Resolve each value once instead of re-running deSanitize for every comparison.
                String poName   = this.deSanitize(csvValues[0].trim());
                String poTitle  = this.deSanitize(csvValues[1].trim());
                String poVendor = this.deSanitize(csvValues[2].trim());

                // PO
                NSRCOR_MER_PO__c existing = poMap.get(poName);
                if (existing == null){
                    // New PO: stage an insert.
                    NSRCOR_MER_PO__c po = new NSRCOR_MER_PO__c();
                    po.Name           = poName;
                    po.PO_Title__c    = poTitle;
                    po.Vendor_Name__c = poVendor;
                    importPO.add(po);
                    poMap.put(poName, po);
                    toInsertRownum.put(countInsert, lineNo);
                    poMapInsert.put(countInsert, poName);
                    countInsert++;
                } else if (existing.PO_Title__c != poTitle || existing.Vendor_Name__c != poVendor){
                    // Existing PO whose values changed: stage an update.
                    existing.PO_Title__c    = poTitle;
                    existing.Vendor_Name__c = poVendor;
                    updatePO.add(existing);
                    toUpdateRownum.put(countUpdate, lineNo);
                    poMapUpdate.put(countUpdate, poName);
                    countUpdate++;
                }
                lineNo++;
            }
        }
      
        if (!importPO.isEmpty()){
            //insert importPO;
            // Partial-success insert so one bad row does not fail the whole chunk.
            Database.SaveResult[] srList = Database.insert(importPO, false);
            Integer ln = 0;
            for (Database.SaveResult sr : srList) {
                if (sr.isSuccess()) {
                    //importCount ++;             
                }else{
                    success = false;
                    // Operation failed, so get all errors              
                    for(Database.Error e : sr.getErrors()) {
                        Integer num = toInsertRownum.get(ln) + rowCount + 1;
                        //remarks = remarks + 'ERROR: Row '+ num + '; '+ 'PO No. '+  poMapInsert.get(ln)+'; '+e.getMessage()+'\n';
                        remarks = remarks + 'ERROR: Row '+ num +'; '+e.getMessage()+'\n';
                    }
                }
                ln++;
            }       
          
        }
        if (!updatePO.isEmpty()){
            // update updatePO;
            // Partial-success update, mirroring the insert handling above.
            Database.SaveResult[] srList = Database.update(updatePO, false);
            Integer ln = 0;
            for (Database.SaveResult sr : srList) {
                if (sr.isSuccess()) {
                    //importCount ++;             
                }else{
                    success = false;
                    // Operation failed, so get all errors              
                    for(Database.Error e : sr.getErrors()) {
                        Integer num = toUpdateRownum.get(ln) + rowCount + 1;
                        //remarks = remarks + 'ERROR: Row '+ num + '; '+ 'PO No. '+  poMapUpdate.get(ln)+'; '+e.getMessage()+'\n';
                        remarks = remarks + 'ERROR: Row '+ num +'; '+e.getMessage()+'\n';
                    }
                }
                ln++;
            }
        }
      
        // Persist the running row count so the next execute() chunk can offset
        // its error line numbers correctly.
        rowCount = rowCount + lineNo;
        importLog.Nubmer_of_Rows__c = rowCount;
        update importLog;

    }
    global void finish(Database.BatchableContext batchableContext){
      
      
        NSRCOR_MER_Import_Log__c importLog = [SELECT Id,Name,Status__c,Remarks__c,Import_Count__c, Nubmer_of_Rows__c FROM NSRCOR_MER_Import_Log__c WHERE Id=:m_batchLogId];
        if(!success){
            if(remarks.length() > 2000){
                importLog.Remarks__c = remarks.substring(0,2000);
            }else{
                importLog.Remarks__c = remarks;
            }
            importLog.Status__c = 'Failed';
          
        }
        //importLog.Import_Count__c = String.valueOf(importCount);
        importLog.Nubmer_of_Rows__c = 0;
        update importLog;
      
        // Chain the next stage, handing the same CSV string and log Id along.
        Database.executeBatch(new NSRCOR_MER_Items_Import_Batch(m_csvFile, m_batchLogId));
      
    }
  
    // Restore the commas and double quotes that were escaped before the CSV was split.
    private String deSanitize(String value) {
        return value.replace('#comma#', ',').replace('#dbc#', '"');
    }
}
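(NSRCOR_MER_CSVIterator is not posted in this thread. For context, a line-based iterator for Database.Batchable<String> generally looks something like the following minimal sketch — an assumption, not the actual class.)

// Assumed sketch of a line iterator: splits the whole CSV string up front
// and hands rows out one at a time.
global class NSRCOR_MER_CSVIterator implements Iterable<String>, Iterator<String> {
    private List<String> rows;
    private Integer index = 0;

    global NSRCOR_MER_CSVIterator(String csvFile, String delimiter) {
        // The entire file stays in memory here, which is what pushes a
        // Database.Stateful batch instance toward the size limit.
        rows = csvFile.split(delimiter);
    }
    global Iterator<String> iterator() { return this; }
    global Boolean hasNext() { return index < rows.size(); }
    global String next() { return rows[index++]; }
}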

All Answers

pbattisson
Because you are using an iterable, the standard SOQL limit on the number of records retrieved still applies, so your iterable can only contain 50,000 items.

Sadly, the solution is to split the CSV into multiple files with a maximum of 50,000 records each.
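For illustration, the splitting could also be done in Apex before enqueuing, along these lines (a rough sketch with a hypothetical method name, not code from this thread — and note the controller itself has heap limits, so very large files may still need to be split before upload):

// Hypothetical sketch: split one large CSV string into chunks of at most
// 50,000 data rows and enqueue a batch per chunk.
public static void splitAndImport(String csvFile, String batchLogId) {
    List<String> lines = csvFile.split('\n');
    String header = lines[0];
    Integer chunkSize = 50000;
    for (Integer i = 1; i < lines.size(); i += chunkSize) {
        List<String> chunk = new List<String>{ header };
        for (Integer j = i; j < Math.min(i + chunkSize, lines.size()); j++) {
            chunk.add(lines[j]);
        }
        // The number of batch jobs that can be queued at once is also limited.
        Database.executeBatch(new NSRCOR_MER_Import_Batch(String.join(chunk, '\n'), batchLogId));
    }
}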
jasmin05
Thank you, pbattisson.
I noticed that my code can't handle even 30,000 items.
Do you know of any way to handle more than 30,000 items?
My user needs to update over 100,000 items frequently...
pbattisson
What error are you getting? Can you also post the code you are using to execute your batch? Normally, reducing the batch size will help you process more (although it will be slower).

As stated, the limit is 50,000, so your user will need to do multiple uploads (or you will need to split the uploaded file before running the batch on it).
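For reference, Database.executeBatch accepts an optional scope size, so each execute() call can be handed fewer CSV lines per chunk (the default is 200). For example:

// Process 50 CSV lines per execute() call to reduce per-transaction heap and CPU usage.
Database.executeBatch(new NSRCOR_MER_Import_Batch(fileName, batchLogId), 50);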
jasmin05
Thank you for your reply.
I will prepare a macro file to split the CSV...

The error message is this:
"Batchable instance is too big: NSRCOR_MER_Import_Batch
Error is in expression '{!importCsv}' in component <apex:commandButton> in page nsrcor_mer_import: External entry point"

I also sometimes receive the following error message:
"System.LimitException: Apex heap size too large: 14264285 External entry point"

The code used to execute the batch is this:
**************************************************************
public class NSRCOR_MER_Import_Ext {
    //==========================================================   
    // Property
    //==========================================================   

    public transient String fileName{get; set;}
    public transient Blob fileContent{get ;set;}
    public List<NSRCOR_MER_PO__c> po {get; set;}
    public List<NSRCOR_MER_Shipment__c> ship {get; set;}
    public List<NSRCOR_MER_PO_Line_Item_No__c> lineItem {get; set;}
    public String  dtToday   {get;set;}
    public String xlsHeader {
        get {
            String strHeader = '';
            strHeader += '<?xml version="1.0"?>';
            strHeader += '<?mso-application progid="Excel.Sheet"?>';
            return strHeader;
        }
    }
   
    public List<NSRCOR_MER_Import_Log__c> logList {get;set;}

    //==========================================================   
    // Constructor
    //==========================================================   

    public NSRCOR_MER_Import_Ext() {
        this.po = new List<NSRCOR_MER_PO__c>();
        this.lineItem = new List<NSRCOR_MER_PO_Line_Item_No__c>();
        this.ship = new List<NSRCOR_MER_Shipment__c>();
       
        logList =
      [SELECT Id,
              Name,
              Status__c,
              Remarks__c,
              Import_Count__c,
              LastModifiedBy.Name,
              LastModifiedDate
         FROM NSRCOR_MER_Import_Log__c
        ORDER BY Name DESC
        LIMIT 20];

    }
   
    //==========================================================   
    // For Command Button
    //==========================================================   
   
    public PageReference importCsv() {
        PageReference pgr = Page.NSRCOR_MER_Import;
        //Parameter Check
        if (String.isBlank(this.fileName) || this.fileContent == null || this.fileContent.size() == 0) {
            ApexPages.addMessage(new ApexPages.Message(ApexPages.severity.ERROR, 'Please select a CSV file to import.'));
            return pgr;
        }
       
        String batchLogId = null;
        try{
            NSRCOR_MER_Import_Log__c importLog = new NSRCOR_MER_Import_Log__c();
            importLog.Status__c = 'In-progress';
            insert importLog;
           
            batchLogId = importLog.Id;

            if (logList.size() > 0) {
              logList.add(0,[SELECT Id,
                                    Name,
                                    Status__c,
                                    Remarks__c,
                                    Import_Count__c,
                                    LastModifiedBy.Name,
                                    LastModifiedDate
                               FROM NSRCOR_MER_Import_Log__c
                              WHERE Id=:batchLogId]);
            } else {
              logList.add([SELECT Id,
                                  Name,
                                  Status__c,
                                  Remarks__c,
                                  Import_Count__c,
                                  LastModifiedBy.Name,
                                  LastModifiedDate
                             FROM NSRCOR_MER_Import_Log__c
                            WHERE Id=:batchLogId]);
            }
       
            // The entire CSV text is passed into the stateful batch as one String,
            // which is what makes the serialized batch instance so large.
            fileName = fileContent.toString();
            Database.executeBatch(new NSRCOR_MER_Import_Batch(fileName, batchLogId));
            fileName = '';
            batchLogId='';
           
        } catch(Exception ex){
            ApexPages.addMessage(new ApexPages.Message(ApexPages.severity.ERROR, 'Invalid file. Please choose a CSV file to import.'));
            ApexPages.addMessage(new ApexPages.Message(ApexPages.severity.ERROR, ex.getMessage()));
            // Only flag the log record if it was created before the failure.
            if (batchLogId != null) {
                NSRCOR_MER_Import_Log__c importLog = [SELECT Id, Name, Status__c, Remarks__c FROM NSRCOR_MER_Import_Log__c WHERE Id = :batchLogId];
                importLog.Status__c = 'Failed';
                importLog.Remarks__c = 'Invalid file. Please choose a CSV file to import.';
                update importLog;
            }
            return pgr;
        }
        ApexPages.addMessage(new ApexPages.Message(ApexPages.severity.INFO, 'Import processing has started. Please check the import log. To view the latest status, click the [MER Import] tab.'));

        return pgr;
    }
    //==========================================================   
    // Private Method
    //==========================================================
   
    @TestVisible private String deSanitize(String value) {
        return value.replace('#comma#',',').replace('#dbc#','"');
    }

}
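(deSanitize implies the upload side escapes commas and double quotes before the batch splits rows on commas. The matching counterpart is not shown in the thread, but it would presumably look something like this hypothetical sketch:)

// Hypothetical counterpart to deSanitize: escape delimiter characters inside
// field values before the row text is split on commas.
private String sanitize(String value) {
    return value.replace(',', '#comma#').replace('"', '#dbc#');
}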
pbattisson
Okay, so reviewing the errors, your only solution is to split the file down. Try using a file of 10,000 rows to start with, and then scale up to see where your sweet spot is. The heap size error occurs because you are loading such a large string into memory, which consumes the heap (the error reports about 14 MB, well over the 6 MB synchronous / 12 MB asynchronous Apex heap limits). Again, reducing the file size will resolve this.

This was selected as the best answer
jasmin05
OK, I will split the file down. Thank you very much for your kind support!!