Jina Chetia

Apex Heap size error in Batch Apex class

Hi,

I am getting an Apex heap size error in my batch Apex class: System.LimitException: Apex heap size too large: 6030143.

The records that need to be processed are in the range of 10K to 50K. I think it is because of the maps that I am using in my code.

This is what my code looks like:

global class CreatePortfolioReport implements Database.Batchable<sObject>, Database.Stateful{
	public String query;
	global String email;

	Map<String, Purchase_Sales_Report__c[]> purchaseMap = new Map<String, Purchase_Sales_Report__c[]>();
	Map<String, Purchase_Sales_Report__c[]> salesMap = new Map<String, Purchase_Sales_Report__c[]>();
	
	
	global Database.QueryLocator start(Database.BatchableContext BC){
        return Database.getQueryLocator(query);
    }
    
    global void execute(Database.BatchableContext BC, List<sObject> scope){
    	 for(sObject s: scope){
    	 	Purchase_Sales_Report__c ps = (Purchase_Sales_Report__c)s;
    	 	if(ps.Transaction__c != null){
	    	 	if(ps.Transaction__c.equals('Purchase')){
		    	 	if(!purchaseMap.isEmpty() && purchaseMap.containsKey(ps.Unique_Name__c)){
		    	 		purchaseMap.get(ps.Unique_Name__c).add(ps);
		    	 	}
		    	 	else{
		    	 		List<Purchase_Sales_Report__c> newList = new List<Purchase_Sales_Report__c>();
		    	 		newList.add(ps);
		    	 		purchaseMap.put(ps.Unique_Name__c, newList);
		    	 	}
	    	   }
	    	   else if(ps.Transaction__c.equals('Sales')){
	    	   		if(!salesMap.isEmpty() && salesMap.containsKey(ps.Unique_Name__c)){
		    	 		salesMap.get(ps.Unique_Name__c).add(ps);
		    	 	}
		    	 	else{
		    	 		List<Purchase_Sales_Report__c> newList = new List<Purchase_Sales_Report__c>();
		    	 		newList.add(ps);
		    	 		salesMap.put(ps.Unique_Name__c, newList);
		    	 	}
	    	   }
    	 	}
    	 } 
    	 System.debug('Purchase Map size'+purchaseMap.size());
    	 System.debug('Sales Map size'+salesMap.size());
    }
    
    global void finish(Database.BatchableContext BC){
    	Map<String, Double> salesUnits = new Map<String, Double>();
    	Map<String, Double> purchaseUnits = new Map<String, Double>();
    	Map<String, Portfolio_Report__c> portfolioMap = new Map<String, Portfolio_Report__c>();
    	List<Temp_Purchase_Report__c> purchaseList = new List<Temp_Purchase_Report__c>();
    	for(String uniqueName: salesMap.keySet()){
  			Double units = 0;
  			Double purchaseAmount = 0;
  			for(Purchase_Sales_Report__c ps:salesMap.get(uniqueName)){
  				
  				if(ps.Units__c != null){
  					units += ps.Units__c;
  				}
  			}
  			salesUnits.put(uniqueName, units);
  		}
  		System.debug('Sales Map'+salesMap.size());
  		for(String uniqueName: purchaseMap.keySet()){
  			
  	        Double units;
  			if(salesUnits.containsKey(uniqueName)){
  				units = Math.abs(salesUnits.get(uniqueName));
  			}
  			Double pUnits = 0;
  			Double product = 0;
  			Double portUnits = 0;
  			Double portAmount = 0;
  			Double divReinvAmount = 0;
  			Double divAmount = 0;
  			Double stpAmount = 0;
  			Boolean entityFlag = true;
  			Id entity;
  			String folio;
  			String assetClass;
  			String schemeName;
  			for(Purchase_Sales_Report__c ps:purchaseMap.get(uniqueName)){
  				
  				if(units != null && pUnits != units){
  					if(ps.Units__c != null){
  						pUnits += ps.Units__c;
  					}
  					
  				}
  				else{
  					
  					if(ps.Units__c != null){
  						portUnits += ps.Units__c;
  					}
  					if(ps.Amount__c != null && ps.Type__c != null){
  						
  						if(ps.Type__c.equalsIgnoreCase('NOR') || ps.Type__c.equalsIgnoreCase('SIP')){
  							portAmount += ps.Amount__c;
  						}
  						else if(ps.Type__c.equalsIgnoreCase('DIR')){
  							divReinvAmount += ps.Amount__c;
  						}
  						else if(ps.Type__c.equalsIgnoreCase('STI') || ps.Type__c.equalsIgnoreCase('SWI')){
  							stpAmount += ps.Amount__c;
  						}
  						else if(ps.Type__c.equalsIgnoreCase('DVP')){
  							divAmount += ps.Amount__c;
  						}
  					}
  					if(ps.Product__c != null){
  						product += ps.Product__c;
  					}
  					if(entityFlag){
	  					entity = ps.Entity__c;
	  					folio = ps.Folio_Number__c;
	  					assetClass = ps.Asset_Class__c;
	  					entityFlag = false;
	  					schemeName = ps.Scheme_Name__c;
  					}
  					System.debug('Create Port Units'+portUnits+'Amount'+portAmount+'Product'+product);
  				}
  				
  			}
  			if(portUnits != 0 && product != 0 && (portAmount != 0 || divAmount != 0 || divReinvAmount != 0 || stpAmount != 0)){
  				Temp_Purchase_Report__c pr = new Temp_Purchase_Report__c(Entity__c= entity, 
  																		 Folio_Number__c = folio, 
  																		 Asset_Class__c = assetClass,
  																		 UniqueName__c = uniqueName, 
  																		 Purchase_Amount__c= portAmount, 
  																		 Units_Quanitity__c = portUnits, 
  																		 Product__c = product,
  																		 Dividend_Reinvested__c = divReinvAmount,
  																		 Dividend__c = divAmount,
  																		 STP_Switch__c = stpAmount,
  																		 Scheme_Scrip_Name__c = schemeName);
  				purchaseList.add(pr);
  			}
  		}
  		System.debug('Purchase List'+purchaseList.size());
  		
  		upsert purchaseList UniqueName__c;
  		  		
  		AsyncApexJob a = [Select Id, 
                                 Status,
                                 NumberOfErrors, 
                                 JobItemsProcessed,  
                                 TotalJobItems, 
                                 CreatedBy.Email 
                                 from AsyncApexJob 
                                 where Id =:BC.getJobId()];
        // Create and send an email with the results of the batch.
        Messaging.SingleEmailMessage mail = new Messaging.SingleEmailMessage();
        mail.setToAddresses(new String[] {email});
        mail.setReplyTo('');
        mail.setSenderDisplayName('Batch Processing');  
        mail.setSubject('Create Portfolio Report ' + a.Status);
        mail.setPlainTextBody('The batch apex job processed ' + a.TotalJobItems +   ' batches with ' + a.NumberofErrors + ' failures.');
    
        Messaging.sendEmail(new Messaging.SingleEmailMessage[] { mail });
  		
    }
}
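
For reference, the heap growth can be watched by logging usage at the end of each execute call (Limits.getHeapSize() and Limits.getLimitHeapSize() are standard Apex methods), e.g.:

// current heap bytes vs. the allowed maximum for this context
System.debug('Heap: ' + Limits.getHeapSize() + ' of ' + Limits.getLimitHeapSize());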

Since I need the maps to process the data in my finish method, I cannot think of any other way to implement this.

Can you please suggest ways to avoid the error?

 

Thanks,

Jina

Ispita_Navatar

Hi Jina,

I think the issue has more to do with the volume of data than with the data structure being used. The maps themselves may not be the cause, but if you are holding all of that data in memory, a high volume of records will cause an issue.

I faced this issue too, in a Visualforce page with a grid. What we did was find the batch size that did not cause any heap size problems, so you may need to experiment to find a batch size at which the limit is not hit. Alternatively, see whether at some point in your code flow you can release memory your code no longer needs.
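
For example, when you kick off the batch you can pass a smaller scope size as the second argument to Database.executeBatch (the default is 200). myQuery and myEmail below are just placeholders for whatever you already pass in:

// myQuery / myEmail stand in for the values you already set today
CreatePortfolioReport job = new CreatePortfolioReport();
job.query = myQuery;
job.email = myEmail;
// Second argument = number of records handed to each execute() call.
// Experiment downward until the heap limit is no longer hit.
Database.executeBatch(job, 50);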

 

 

Did this answer your question? If not, let me know what didn't work, or if so, please mark it solved.

Jina Chetia

Thanks, Ispita, for the response, but reducing the batch size does not help because I am storing the data in the maps across all the batches. I also don't see any other place where I can release the memory.

 

Is there any other way to resolve the issue?

Jesus.Arcas

Hi, Jina

 

We have faced that issue in the past, and our code was very similar to yours. The problem comes from your maps purchaseMap and salesMap together with the Database.Stateful interface. Database.Stateful preserves the state of those variables, which means that with every batch execution the maps grow and grow. They live on the heap, so they will hit the governor limit once the number of records processed is high enough.

 

My suggestion to avoid the governor limit: do not use the maps; use SOQL queries instead. Remember you have 100 SOQL queries per batch, so take advantage of that. This is how we solved the issue.
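
As a rough sketch of what I mean (using the object and field names from your code; the aliases uname and total are just for illustration), the sales totals you accumulate in execute could instead be computed in finish with one aggregate query:

// Compute per-name sales totals in finish() with an aggregate query
// instead of a stateful map carried across all the batches.
Map<String, Decimal> salesUnits = new Map<String, Decimal>();
for(AggregateResult ar : [SELECT Unique_Name__c uname, SUM(Units__c) total
                          FROM Purchase_Sales_Report__c
                          WHERE Transaction__c = 'Sales'
                          GROUP BY Unique_Name__c]){
    salesUnits.put((String)ar.get('uname'), (Decimal)ar.get('total'));
}

Bear in mind that query row limits still apply to aggregate queries, so check that your data volumes fit within them.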

 

Hope it helps you.

 

Regards,
Jesus Arcas.

Jesus.Arcas

Hi again.

 

I have just noticed you say you need the maps for your finish method. What about adding a custom field to the object, classifying every record as Sales or Purchase? That way you would not need the maps, just a couple of queries in the finish method. The batch process would update all the records during execute with no limit issues.
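
Something like this, for instance (Category__c is a hypothetical custom field you would add; everything else uses your existing names):

// Illustrative sketch -- execute() stamps each record instead of
// caching it in a stateful map, so nothing must survive between batches.
global void execute(Database.BatchableContext BC, List<sObject> scope){
    List<Purchase_Sales_Report__c> toUpdate = new List<Purchase_Sales_Report__c>();
    for(sObject s : scope){
        Purchase_Sales_Report__c ps = (Purchase_Sales_Report__c)s;
        if(ps.Transaction__c != null){
            ps.Category__c = ps.Transaction__c; // 'Purchase' or 'Sales'
            toUpdate.add(ps);
        }
    }
    update toUpdate;
}
// finish() then just queries by the flag, e.g.:
// [SELECT Unique_Name__c, Units__c, Amount__c FROM Purchase_Sales_Report__c
//  WHERE Category__c = 'Sales']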

 

Hope that helps.

 

Regards,

Jesus Arcas.