Karna Shiva

I am inserting more than 10,000 rows through Batch Apex. How do I escape from this error: Too many DML rows: 10001?

SamuelDeRycke
Can you reduce your scope so that you limit the number of records you're inserting to 10,000? It's a hard limit per batch execution; there are no workarounds.
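For illustration, a minimal sketch of setting an explicit scope size (the batch class name here is a placeholder, not from the thread); each execute() call then receives at most that many records, so the DML it performs on them stays well below the 10,000-row limit:

// "MyBatch" is a placeholder for any Database.Batchable<SObject> implementation.
// The second argument caps each execute() call at 200 records.
Id jobId = Database.executeBatch(new MyBatch(), 200);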
Nilesh Jagtap (NJ)
Hi Karna,

What batch size have you set? It seems like you are processing 1 record at a time.
For more info on Batch Apex, please go through http://www.salesforce.com/us/developer/docs/apexcode/Content/apex_batch_interface.htm
Thanks,
N.J
Shri Raj
This error is seen when a user performs a DML operation on more than 10,000 records in a single transaction, i.e. if a user executes an insert/update/upsert on a collection containing more than 10,000 records, the error "Too many DML rows" is displayed. This is a governor limit error.

In order to resolve this issue, an asynchronous method (the @future annotation) can be used. Asynchronous methods raise the limit to 10,000 rows, but do not scale with batch size (note that there is also a limit of 10 asynchronous method invocations per transaction). Please note that an @future method runs only when there are enough system resources available.
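For reference, a minimal sketch of the @future approach described above (class name, method name, and the fields queried are illustrative, not from the thread). Note that @future methods cannot be invoked from within batch Apex, so this only applies to synchronous entry points such as triggers and controllers:

public class RebateFutureHelper {
    // Runs asynchronously, in its own transaction with its own governor limits.
    // @future parameters must be primitives or collections of primitives.
    @future
    public static void insertGrossProfits(List<Id> rebateIds) {
        List<Gross_Profit__c> gpList = new List<Gross_Profit__c>();
        for (Rebate_Amount__c oli : [SELECT Quote_Number__c, Company_Code__c
                                     FROM Rebate_Amount__c
                                     WHERE Id IN :rebateIds]) {
            Gross_Profit__c gp = new Gross_Profit__c();
            gp.Quote_No__c = oli.Quote_Number__c;
            gp.Company__c  = oli.Company_Code__c;
            gpList.add(gp);
        }
        insert gpList;
    }
}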
Karna Shiva
I have tried with a batch size of 10 and 20, but it is of no use; I am still getting the error. Let me try the @future annotation.
Shri Raj
Can you paste your code here?
Karna Shiva
Please see the code below.

global class RebateBatch implements Database.Batchable<SObject>
{
    // (start and execute methods were not included in the post)

    global void finish(Database.BatchableContext bc)
    {
        // Chain the next batch from finish() with a scope size of 10.
        RebateGPBatch ord = new RebateGPBatch();
        Database.executeBatch(ord, 10);
    }
}


global class RebateGPBatch implements Database.Batchable<SObject>
{
    global Database.QueryLocator start(Database.BatchableContext bc)
    {
        string query = '';
        query += ' select Opportunity_Name__c from Rebate_Amount__c ';

        if (Test.isRunningTest())
        {
            query  = ' select id from Rebate_Amount__c ';
            query += ' limit 20';
        }

        return Database.getQueryLocator(query);
    }

    global void execute(Database.BatchableContext bc, List<SObject> scope)
    {
        // Collect the Ids of the Rebate_Amount__c records in this scope.
        set<Id> rebid = new set<Id>();
        for (sObject obj : scope)
        {
            Rebate_Amount__c op = (Rebate_Amount__c)obj;
            rebid.add(op.id);
        }

        // Look up shipped cost records. Note: rebid holds Rebate_Amount__c Ids,
        // but the filter compares them to Opportunity2__c, an Opportunity lookup.
        list<Shipped__c> listSch = [select cost__c, Material__c, Opportunity2__c
                                    from Shipped__c
                                    where Quote__c != null and Opportunity2__r.StageName = 'S' and Opportunity2__c = :rebid];

        map<string, Shipped__c> mapShipped = new map<string, Shipped__c>();
        for (Shipped__c sch : listSch)
        {
            mapShipped.put(sch.Material__c + sch.Opportunity2__c, sch);
        }

        // GP insert - start
        List<Gross_Profit__c> GPList = new List<Gross_Profit__c>();

        // Note: this query has no WHERE clause, so every execute() call processes
        // the entire Rebate_Amount__c table, not just the records in the current scope.
        List<Rebate_Amount__c> Gp_data = [Select Material__c, Quote_Number__c, Company_Code__c, Rebate_by_Amount__c, Item_Number__c, Active_Flag__c, Net_Sales_Amount__c from Rebate_Amount__c];

        for (Rebate_Amount__c oli : Gp_data)
        {
            Gross_Profit__c gp = new Gross_Profit__c();
            gp.Quote_No__c = oli.Quote_Number__c;
            gp.Company__c = oli.Company_Code__c;
            gp.Item_Number__c = oli.Item_Number__c;
            gp.Rebate__c = oli.Rebate_by_Amount__c;
            gp.Active_Flag__c = oli.Active_Flag__c;
            gp.Net_Sales__c = oli.Net_Sales_Amount__c;
            string keyMap_Cost = oli.Material__c + oli.Quote_Number__c;

            if (mapShipped.containskey(keyMap_Cost))
            {
                gp.Cost__c = mapShipped.get(keyMap_Cost).cost__c;
            }
            GPList.add(gp);
        }

        if (GPList.size() > 0)
        {
            system.debug('GP Insert section ');
            insert GPList;
        }
        // GP insert - end
    }

    global void finish(Database.BatchableContext bc)
    {
    }
}
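A likely reason the limit is still hit, whatever the batch size: the Gp_data query above has no WHERE clause, so every execute() call re-queries the whole Rebate_Amount__c table and inserts one Gross_Profit__c per row. A minimal sketch of one way to scope it (not code from the thread, reusing the field names above): select everything in start() and build rows only from the records in the current scope.

global class RebateGPBatchScoped implements Database.Batchable<SObject>
{
    global Database.QueryLocator start(Database.BatchableContext bc)
    {
        // Select every field execute() needs, so no re-query is required.
        return Database.getQueryLocator(
            'select Material__c, Quote_Number__c, Company_Code__c, Rebate_by_Amount__c, ' +
            'Item_Number__c, Active_Flag__c, Net_Sales_Amount__c from Rebate_Amount__c');
    }

    global void execute(Database.BatchableContext bc, List<Rebate_Amount__c> scope)
    {
        List<Gross_Profit__c> gpList = new List<Gross_Profit__c>();
        for (Rebate_Amount__c oli : scope)
        {
            Gross_Profit__c gp = new Gross_Profit__c();
            gp.Quote_No__c    = oli.Quote_Number__c;
            gp.Company__c     = oli.Company_Code__c;
            gp.Item_Number__c = oli.Item_Number__c;
            gp.Rebate__c      = oli.Rebate_by_Amount__c;
            gp.Active_Flag__c = oli.Active_Flag__c;
            gp.Net_Sales__c   = oli.Net_Sales_Amount__c;
            // Cost__c lookup against Shipped__c omitted here for brevity.
            gpList.add(gp);
        }
        // At most one row per scope record, far below the 10,000-row limit.
        if (!gpList.isEmpty())
        {
            insert gpList;
        }
    }

    global void finish(Database.BatchableContext bc) {}
}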
Karna Shiva
I am able to insert more than 10,000 rows using the example code below.


global class RebateGPScheduler implements Schedulable
{
    global void execute(SchedulableContext sc)
    {
        // Launch the batch from the scheduler with a scope size of 80.
        RebateGPBatch ord = new RebateGPBatch();
        Database.executeBatch(ord, 80);
    }
}



global class RebateGPBatch implements Database.Batchable<SObject>
{
    // start, execute and finish as in the batch class posted earlier
}

I think the batch size only takes effect when the batch is launched from the scheduler class.

Previously I was calling the RebateGPBatch class from another batch job class.
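For completeness, a minimal sketch of how such a scheduler is typically started (the job name and cron expression are illustrative):

// Run RebateGPScheduler every night at 2 AM; its execute() then launches
// the batch with the scope size passed to Database.executeBatch.
System.schedule('Nightly Rebate GP job', '0 0 2 * * ?', new RebateGPScheduler());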

Karna Shiva

One more issue:

The Rebate_Amount__c object has 92,255 rows, so the job should insert 92,255 rows into Gross Profit, but it is inserting more than 1 lakh (100,000) rows.

global class RebateGPBatch implements Database.Batchable<SObject>
{
    global Database.QueryLocator start(Database.BatchableContext bc)
    {
        string query = '';
        query += ' select Opportunity_Name__r.id from Rebate_Amount__c ';

        if (Test.isRunningTest())
        {
            query  = ' select id from Rebate_Amount__c ';
            query += ' where company__c = \'SDS\' ';
            query += ' limit 20';
        }

        return Database.getQueryLocator(query);
    }

    global void execute(Database.BatchableContext bc, List<SObject> scope)
    {
        // Collect the parent Opportunity Ids of the records in this scope.
        List<Id> optyWithQuoteSet = new List<Id>();
        for (sObject obj : scope)
        {
            Rebate_Amount__c op = (Rebate_Amount__c)obj;
            optyWithQuoteSet.add(op.Opportunity_Name__r.id);
        }

        // Look up shipped cost by Material + Opportunity.
        list<Shipped__c> listSch = [select cost__c, Material__c, Opportunity2__c
                                    from Shipped__c
                                    where Quote__c != null and Opportunity2__r.StageName = 'S' and Opportunity2__c = :optyWithQuoteSet];

        map<string, Shipped__c> mapShipped = new map<string, Shipped__c>();
        for (Shipped__c sch : listSch)
        {
            mapShipped.put(sch.Material__c + sch.Opportunity2__c, sch);
        }

        // GP insert - start
        List<Gross_Profit__c> GPList = new List<Gross_Profit__c>();

        // Note: this query is keyed on the Opportunity, not on the scoped
        // Rebate_Amount__c Ids, so rebate rows that share an opportunity are
        // re-selected (and re-inserted) by more than one execute() call.
        List<Rebate_Amount__c> Gp_data = [Select Material__c, Quote_Number__c, Company_Code__c, Rebate_by_Amount__c, Item_Number__c, Active_Flag__c, Net_Sales_Amount__c from Rebate_Amount__c where Opportunity_Name__r.id = :optyWithQuoteSet];

        for (Rebate_Amount__c oli : Gp_data)
        {
            Gross_Profit__c gp = new Gross_Profit__c();
            gp.Quote_No__c = oli.Quote_Number__c;
            gp.Company__c = oli.Company_Code__c;
            gp.Item_Number__c = oli.Item_Number__c;
            gp.Rebate__c = oli.Rebate_by_Amount__c;
            gp.Active_Flag__c = oli.Active_Flag__c;
            gp.Net_Sales__c = oli.Net_Sales_Amount__c;
            string keyMap_Cost = oli.Material__c + oli.Quote_Number__c;

            if (mapShipped.containskey(keyMap_Cost))
            {
                gp.Cost__c = mapShipped.get(keyMap_Cost).cost__c;
            }
            GPList.add(gp);
        }

        if (GPList.size() > 0)
        {
            system.debug('GP Insert section ');
            insert GPList;
        }
        // GP insert - end
    }

    global void finish(Database.BatchableContext bc)
    {
    }
}
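One observation on the code above: because the Gp_data query is keyed on Opportunity_Name__r.Id, each execute() pulls back every rebate row for the opportunities in its scope, including rows that belong to other chunks, so rows sharing an opportunity are inserted more than once. That would explain 100,000+ inserts from 92,255 source rows. A minimal sketch of re-querying by the scoped record Ids instead (an assumption, not a fix confirmed in the thread):

// Key the re-query on the Ids of the records actually in this chunk,
// so each Rebate_Amount__c row yields exactly one Gross_Profit__c row.
Set<Id> rebIds = new Map<Id, SObject>(scope).keySet();
List<Rebate_Amount__c> Gp_data = [
    select Material__c, Quote_Number__c, Company_Code__c, Rebate_by_Amount__c,
           Item_Number__c, Active_Flag__c, Net_Sales_Amount__c
    from Rebate_Amount__c
    where Id in :rebIds
];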
Karna Shiva
I have solved this issue using the scheduler class and batch size.
Varun Nirala
If you are facing this issue while processing DML on more than 10K records synchronously or from a Visualforce page, then the Queueable interface is the solution for you. If you need more details, follow the link below.

http://www.exampeta.com/article/agpzfmV4YW1wZXRhchULEghBcnRpY2xlcxiAgICg9tbOCww/Salesforce-APEX---how-to-perform-dml-on-more-than-10000-rows-in-VF-Apex-using-Queueable-interface
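For reference, a minimal sketch of the Queueable pattern the link describes (class name and chunking are assumed, not taken from the article): insert up to 10,000 rows per job and chain another job for the remainder, so no single transaction exceeds the DML row limit.

public class GrossProfitQueueable implements Queueable {
    private List<Gross_Profit__c> pending;

    public GrossProfitQueueable(List<Gross_Profit__c> pending) {
        this.pending = pending;
    }

    public void execute(QueueableContext qc) {
        // Insert at most 10,000 rows in this transaction.
        List<Gross_Profit__c> thisChunk = new List<Gross_Profit__c>();
        List<Gross_Profit__c> remainder = new List<Gross_Profit__c>();
        for (Gross_Profit__c gp : pending) {
            if (thisChunk.size() < 10000) {
                thisChunk.add(gp);
            } else {
                remainder.add(gp);
            }
        }
        insert thisChunk;

        // Chain another job for whatever is left over.
        if (!remainder.isEmpty()) {
            System.enqueueJob(new GrossProfitQueueable(remainder));
        }
    }
}

A controller or trigger would then start the chain with a single System.enqueueJob(new GrossProfitQueueable(allRows)) call.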