// NOTE(review): page-scrape residue from the forum site's banner script; a
// no-op stub that ignores its argument. Not part of the Apex code below.
function readOnly(count){ }
Starting November 20, the site will be set to read-only. On December 4, 2023,
forum discussions will move to the Trailblazer Community.
+ Start a Discussion
P.kumar 27

// NOTE(review): this test-class snippet is truncated — the method body and the
// closing braces for docTest() and AmazonS3TriggerTest are missing from the post.
// What is visible only builds an Opportunity in the 'Collecting Docs' stage.
@isTest public class AmazonS3TriggerTest { @isTest public static void docTest(){ opportunity opp = new opportunity(); opp.Name = 'test1'; opp.StageName = 'Collecting Docs'; opp.CloseDate =system.today().addDays(30);

Hi All,
I have written a test class and am getting 70% code coverage.
Can anyone help me get to 90% code coverage using this test class?
P.kumar 27
This is the handler class
// Trigger handler for Amazon_S3_Files__c: links uploaded S3 files to
// Document__c checklist records on the related Opportunity.
public class AmazonS3TriggerHandler {
    // SF_Folder_Name__c -> checklist custom-metadata record. Initialized by
    // updateDocumentCheckList and read by customDocCheckStageMdtRecord.
    private static Map<String,Document_Checklist_to_Stage__mdt> sfNameToDocNameMap;
    // NOTE(review): isRun is never read anywhere in this chunk — presumably a
    // recursion guard; confirm it is used elsewhere before removing.
    private static Boolean isRun = true;
    /**
     * For each inserted Amazon_S3_Files__c record tied to an Opportunity,
     * either stamps an existing Document__c checklist entry as 'Uploaded'
     * or builds a new Document__c from the checklist custom metadata.
     *
     * @param s3List   Trigger.new records (after insert)
     * @param oldS3Map Trigger.oldMap — NOTE(review): currently unused in this method
     */
    public static void updateDocumentCheckList(List<Amazon_S3_Files__c> s3List, Map<Id,Amazon_S3_Files__c> oldS3Map){
        // Load every checklist metadata record and index it by SF folder name
        // so incoming S3 files can be matched to a checklist entry.
        Map<String , Document_Checklist_to_Stage__mdt> DocumentMap = Document_Checklist_to_Stage__mdt.getAll();
        
        List<Amazon_S3_Files__c> AmazonS3FilesUpdate = new List<Amazon_S3_Files__c>();
        // NOTE(review): a Set<String> would make the .contains() checks below O(1).
        List<String> docNameList = new List<String>();
        sfNameToDocNameMap = new Map<String,Document_Checklist_to_Stage__mdt>();
        for( Document_Checklist_to_Stage__mdt document : DocumentMap.values() ){
            docNameList.add(document.SF_Folder_Name__c);
            system.debug('document.Document_Checklist_Name__c-->'+document.SF_Folder_Name__c);
            sfNameToDocNameMap.put(document.SF_Folder_Name__c,document);
        }
        system.debug('sfNameToDocNameMap-->'+sfNameToDocNameMap);
        // Collect the keys needed to query existing Document__c records:
        // parent Opportunity Ids, recognized folder names, and fallback document keys.
        Set<String> oppIds = new Set<String>();
        Set<String> folderNames = new Set<String>();
        Set<String> documentKey = new Set<String>();
        for(Amazon_S3_Files__c s3: s3List){
            system.debug('s3.Folder_Name__c-->'+s3.Folder_Name__c); 
            system.debug('s3.Opportunity__c-->'+s3.Opportunity__c);
            
            if(s3.Opportunity__c!=null){
                oppIds.add(s3.Opportunity__c);
            }
            // Folder name takes precedence; Document_Key__c is only used when
            // the folder is not a recognized checklist folder.
            if(docNameList.contains(s3.Folder_Name__c)){
                folderNames.add(s3.Folder_Name__c);
            } else if(s3.Document_Key__c != null){
                documentKey.add(s3.Document_Key__c);
            }
            
        }
        
        Set<String> s3IdSet = new Set<String>();
        
        // Only proceed when there is at least one Opportunity and something to match on.
        if(oppIds.size()>0 && (folderNames.size()>0  || documentKey.size()>0) ){
            // Opportunity Id -> its existing Document__c records (helper defined elsewhere in this class).
            Map<String,List<Document__c>> existingDoc = getAllDocuments(oppIds,folderNames, documentKey);
            system.debug('existingDoc-->'+existingDoc);
            List<Document__c> docsToUpsert = new List<Document__c>();
            //Id externalRecordTypeId = Schema.SObjectType.Document__c.getRecordTypeInfosByName().get('External').getRecordTypeId();
            //Id internalRecordTypeId = Schema.SObjectType.Document__c.getRecordTypeInfosByName().get('Internal').getRecordTypeId();
            
            
            for(Amazon_S3_Files__c s3: s3List){
                if( (docNameList.contains(s3.Folder_Name__c) || documentKey.contains(s3.Document_Key__c)) && s3.Opportunity__c!=null){ 
                    if(existingDoc.containsKey(s3.Opportunity__c)){
                        // Re-index this Opportunity's documents by folder name / document key.
                        List<Document__c> docList = existingDoc.get(s3.Opportunity__c);
                        Map<String,List<Document__c>> existingNameDocs = documentMap( docList);
                        System.debug('existingNameDocs-->'+existingNameDocs); 
                        System.debug('existingNameDocs.containsKey(s3.Folder_Name__c)-->'+existingNameDocs.containsKey(s3.Folder_Name__c)); 
                        // Prefer a match by folder name, then fall back to document key;
                        // only the first matching document is updated.
                        Document__c findDoc = null;
                        if(existingNameDocs.containsKey(s3.Folder_Name__c)){
                            findDoc = existingNameDocs.get(s3.Folder_Name__c)[0];
                        } else if(existingNameDocs.containsKey(s3.Document_Key__c)){ 
                            findDoc =  existingNameDocs.get(s3.Document_Key__c)[0];
                        } 
                        if(findDoc != null){
                            
                            // Existing checklist entry: mark uploaded and copy over file fields.
                            Document__c existDoc = findDoc;
                            existDoc.Amazon_S3_File__c=s3.Id;
                            existDoc.Status__c ='Uploaded';
                            if(s3.ID__c != null){
                                existDoc.ID__c = s3.ID__c;
                            }
                            if(s3.Mortgage_Statement__c != null){
                                existDoc.Mortgage_Statement__c = s3.Mortgage_Statement__c;
                            }
                            
                            /*if(s3.Source__c=='Website'){
existDoc.RecordTypeId=externalRecordTypeId;
}else{
existDoc.RecordTypeId=internalRecordTypeId;
}*/
                            docsToUpsert.add(existDoc);
                        }else{
                            // need to create new record
                            
                            Document__c newDocs = createDocument(s3, customDocCheckStageMdtRecord(s3));                         
                            docsToUpsert.add(newDocs);
                        }
                        
                    }else{ 
                        // need to create new record
                        //Document__c newDocs = createDocument(s3,sfNameToDocNameMap.get(s3.Folder_Name__c));                         
                        Document__c newDocs = createDocument(s3,customDocCheckStageMdtRecord(s3));                         
                        docsToUpsert.add(newDocs);
                    }
                }else{
                    // do nothing, either opportunity not exist or folder name not required to create
                }
            }
            System.debug('docsToUpsert-->'+docsToUpsert);
            if(docsToUpsert.size()>0){
                // NOTE(review): the upsert and task creation are commented out, so this
                // method currently persists nothing — confirm whether this is intentional.
                //upsert docsToUpsert;
                //createTask( docsToUpsert);
            }
        }
        
    }
    
    private static Document_Checklist_to_Stage__mdt customDocCheckStageMdtRecord(Amazon_S3_Files__c s3){
        Document_Checklist_to_Stage__mdt document;
        if(sfNameToDocNameMap.containsKey(s3.Folder_Name__c)){
            document = sfNameToDocNameMap.get(s3.Folder_Name__c);
        }
        else {
            document = new Document_Checklist_to_Stage__mdt();
            document.Document_Checklist_Name__c = s3.Folder_Name__c;
            document.Record_Type__c = 'Third_Party';       
        }
        System.debug('document : ' + document);
        return document;
    }
   public static  Map<String,List<Document__c>> documentMap(List<Document__c> docList){
        Map<String,List<Document__c>> existingNameDocs = new Map<String,List<Document__c>>();
        for(Document__c docs: docList){
            if(!existingNameDocs.containsKey(docs.Name)){
                existingNameDocs.put(docs.SF_Folder_Name__c,new List<Document__c>());
            }
            existingNameDocs.get(docs.SF_Folder_Name__c).add(docs);
            if(docs.Document_Key__c != null){
                existingNameDocs.put(docs.Document_Key__c,new List<Document__c>());    
                existingNameDocs.get(docs.Document_Key__c).add(docs);
            }    
        }
        return existingNameDocs;
    }
    
    public static Document__c createDocument(Amazon_S3_Files__c s3, Document_Checklist_to_Stage__mdt document){
        String Name = document.Document_Checklist_Name__c;
        //Id externalRecordTypeId = Schema.SObjectType.Document__c.getRecordTypeInfosByName().get('External').getRecordTypeId();
        //Id internalRecordTypeId = Schema.SObjectType.Document__c.getRecordTypeInfosByName().get('Internal').getRecordTypeId();
        Id recordTypeId = Schema.SObjectType.Document__c.getRecordTypeInfosByDeveloperName().get(document.Record_Type__c).getRecordTypeId();
        System.debug('recordTypeId-->'+recordTypeId);
        Document__c newDocs =new Document__c();
        newDocs.SF_Folder_Name__c=s3.Folder_Name__c;
        newDocs.Amazon_S3_File__c=s3.Id;
        newDocs.Opportunity__c=s3.Opportunity__c;
        newDocs.Document_Key__c=s3.Document_Key__c;
        newDocs.Status__c='Uploaded';
        newDocs.RecordTypeId=recordTypeId;
        /*if(s3.Source__c=='Website'){
newDocs.RecordTypeId=externalRecordTypeId;
}else{
newDocs.RecordTypeId=internalRecordTypeId;
}*/
        newDocs.Name=Name;
        
        return newDocs;
    }

----------------------------------------------------------------------------------------------------------------------------------------------
And this is the trigger:

trigger AmazonS3Trigger on Amazon_S3_Files__c (before insert, after insert, after update) {

    // Global kill switch: skip all trigger logic when the bypass flag is set.
    if (SPR_TriggerUtility.ByPassAllTrigger) {
        return;
    }

    // Dispatch each declared trigger context to its handler method.
    switch on Trigger.operationType {
        when BEFORE_INSERT {
            AmazonS3TriggerHandler.updateAmazonS3(Trigger.New);
        }
        when AFTER_INSERT {
            AmazonS3TriggerHandler.updateDocumentCheckList(Trigger.New, Trigger.oldMap);
        }
        when AFTER_UPDATE {
            AmazonS3TriggerHandler.updateDocumentChecklistFields(Trigger.New);
        }
        when else {
            // No other contexts are declared on this trigger.
        }
    }
}

I want to know how to increase the test code coverage of the handler class to 100 percent.