deepak_naik

ClientInputError - exceptionMessage='Server error returned in unknown format' while using Bulk Query

I am using Bulk Query, reading one batch at a time and writing the results to standard output. When the record count is small, for example 100,000 records, the job works fine, but when I try to extract a larger volume, for example 7 million (7,000,000) records, the job aborts with the following exception:
Job id is 750A0000004YXlsIAG
[AsyncApiException exceptionCode='ClientInputError' exceptionMessage='Server error returned in unknown format']
    at com.sforce.async.BulkConnection.parseAndThrowException(BulkConnection.java:190)
    at com.sforce.async.BulkConnection.doHttpGet(BulkConnection.java:747)
    at com.sforce.async.BulkConnection.getBatchInfo(BulkConnection.java:557)
    at com.sforce.async.BulkConnection.getBatchInfo(BulkConnection.java:550)
Attached is the source code. The Salesforce JAR files I use are force-partner-api-39.0.0.jar, force-wsc-39.0.0.jar, jackson-core-asl-1.9.13.jar and jackson-mapper-asl-1.9.13.jar.

Is there anything that I am missing here?
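The stack trace shows the failure coming from BulkConnection.getBatchInfo, i.e. the status call itself received a response the client could not parse. Below is a minimal sketch of wrapping that call with a bounded retry and a delay between attempts, assuming the server error is transient; the BatchInfoPoller class name and the maxRetries/sleepMillis parameters are illustrative only and are not part of the attached program.

import com.sforce.async.AsyncApiException;
import com.sforce.async.BatchInfo;
import com.sforce.async.BulkConnection;

public class BatchInfoPoller {

    // Polls a batch's status, sleeping between attempts and retrying when the
    // Bulk API returns a response the client cannot parse. The retry count and
    // sleep interval are illustrative values only.
    public static BatchInfo getBatchInfoWithRetry(BulkConnection connection,
            String jobId, String batchId, int maxRetries, long sleepMillis)
            throws AsyncApiException, InterruptedException {
        for (int attempt = 1; ; attempt++) {
            try {
                return connection.getBatchInfo(jobId, batchId);
            } catch (AsyncApiException e) {
                if (attempt >= maxRetries) {
                    throw e;                  // give up after maxRetries attempts
                }
                Thread.sleep(sleepMillis);    // back off instead of retrying immediately
            }
        }
    }
}

If the error persists even with a delay between status calls, enabling config.setTraceMessage(true) on the ConnectorConfig would show the raw response the server actually returned.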
deepak_naik
import java.io.*;
import java.util.*;

import com.sforce.async.*;
import com.sforce.soap.partner.PartnerConnection;
import com.sforce.ws.ConnectionException;
import com.sforce.ws.ConnectorConfig;

    
public class BulkQueryPerformanceObject {
	
	static int finalBatchCount=0;	
    static JobInfo job = new JobInfo();
    static BulkConnection bulkConnection = null;
    BatchInfo[] bListInfo = null;
    int numberOfBatchesForQueryExtract = 0;
    int numberOfRecordsExtracted = 0;
    long startTime = System.nanoTime();


    public static void main(String[] args)
      throws AsyncApiException, ConnectionException, IOException {
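        // Force TLS 1.2, log in, enable PK chunking (chunkSize=10000) on the
        // Bulk API connection, then run the query extract.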
        BulkQueryPerformanceObject exampleQuery = new BulkQueryPerformanceObject();
        System.setProperty("https.protocols", "TLSv1.2");
        bulkConnection = exampleQuery.login();
        bulkConnection.addHeader("Sforce-Enable-PKChunking","chunkSize=10000");
        exampleQuery.doBulkQuery(bulkConnection);
    }
    
    
    public BulkConnection login() {
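          // Log in through the SOAP partner API (constructing the PartnerConnection
          // fills partnerConfig with a session id and service endpoint), then reuse
          // that session to build a BulkConnection against the async REST endpoint.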
    	  
          String userName = "testing1@ibm.com";
          String passWord = "abcdef12345676";
          String url = "https://login.salesforce.com/services/Soap/u/39.0";
          BulkConnection _bulkConnection = null;
    	  try {
    		
    	        ConnectorConfig partnerConfig = new ConnectorConfig();
    	        partnerConfig.setUsername(userName);
    	        partnerConfig.setPassword(passWord);
    	        partnerConfig.setAuthEndpoint(url);
    	        new PartnerConnection(partnerConfig);
    	        ConnectorConfig config = new ConnectorConfig();
    	        config.setSessionId(partnerConfig.getSessionId());
    	        String soapEndpoint = partnerConfig.getServiceEndpoint();
    	        String apiVersion = "39.0";
    	        String restEndpoint = soapEndpoint.substring(0, soapEndpoint.indexOf("Soap/"))
    	            + "async/" + apiVersion;
    	        config.setRestEndpoint(restEndpoint);
    	        config.setCompression(true);
    	        config.setTraceMessage(false);
    	        _bulkConnection = new BulkConnection(config);
    	  } catch (AsyncApiException aae) {
    	    aae.printStackTrace();
    	  } catch (ConnectionException ce) {
    	    ce.printStackTrace();
    	  }
    	  return _bulkConnection;
    	}

    
    
    public void doBulkQuery(BulkConnection bulkConnection) {
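          // Create a CSV bulk query job for PerformanceObject__c, submit the SOQL query
          // as a batch, wait for PK chunking to produce data batches, then poll the first
          // data batch and stream its result sets to standard output.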

    	  try {
    	    job.setObject("PerformanceObject__c"); 	    
    	    job.setOperation(OperationEnum.query);
    	    job.setConcurrencyMode(ConcurrencyMode.Parallel);
    	    job.setContentType(ContentType.CSV);
    	    
    	    job = bulkConnection.createJob(job);
    	    assert job.getId() != null;
    	    System.out.println("Job id is " + job.getId());

    	    
    	    String query = "Select Id, Name, OwnerId From PerformanceObject__c";
    	    ByteArrayInputStream bout = new ByteArrayInputStream(query.getBytes()); 
    	    bulkConnection.createBatchFromStream(job, bout);
    	            	     	    
    	    bListInfo = bulkConnection.getBatchInfoList(job.getId()).getBatchInfo();
    	    
    	    // With PK chunking, wait until the server has split the query into at least
    	    // one data batch (index 0 is the original batch, which is not processed).
    	    // Note: this polls getBatchInfoList in a tight loop with no delay between calls.
    	    while(bListInfo.length < 2)
    	    	bListInfo = bulkConnection.getBatchInfoList(job.getId()).getBatchInfo();

    	    finalBatchCount++;
  	        BatchInfo info = bListInfo[finalBatchCount];
  	      	numberOfBatchesForQueryExtract++;
    	    
  	      	String[] queryResults = null;
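  	      	// Poll this batch's status until it is Completed or Failed
  	      	// (up to 10000 polls, with no delay between calls).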

  	      	for(int i=0; i<10000; i++) 
  	      	{
  	      	    info = bulkConnection.getBatchInfo(job.getId(), info.getId());
    	          
  	      	    if (info.getState() == BatchStateEnum.Completed) 
  	      	    {
  	      	    	QueryResultList list = bulkConnection.getQueryResultList(job.getId(),info.getId());             
  	      	    	queryResults = list.getResult();
  	      	    	break;
  	      	    } 
  	      	    else if (info.getState() == BatchStateEnum.Failed) 
  	      	    {
  	      	    	System.out.println("-------------- failed ----------" + info);    	            
  	      	    	break;
  	      	    } 
  	      	    else 
  	      	    {
  	      	    	System.out.println("-------------- waiting ----------"  + info);  	           
  	      	    }
  	      	}
    	    
  	      	if (queryResults != null) 
  	      	{
  	      	    for (String resultId : queryResults) 
  	      	    {
  	      	    	InputStream resultStream = bulkConnection.getQueryResultStream(job.getId(), info.getId(), resultId);
  	      	    	BufferedReader lineReader = new BufferedReader(new InputStreamReader(resultStream,"UTF8"));
  	      	    	String lineString = null;
  	      	    	// Skip the CSV header row; only data rows are printed and counted
  	      	    	lineReader.readLine();
  	      	    	while((lineString = lineReader.readLine()) != null) 
  	      	    	{
  	      	    	    System.out.println("lineString : " + lineString);
  	      	    	    numberOfRecordsExtracted++;
  	      	    	}
  	      	    }
  	      	}  	      	    

    	  } catch (AsyncApiException aae) {
    	  		aae.printStackTrace();
    	  } catch (UnsupportedEncodingException e) {
				e.printStackTrace();
		  } catch (IOException e) {
				e.printStackTrace();
		}   
    	  
    	// Keep consuming the remaining PK-chunked batches until isQueryDone()
    	// reports that all data batches have been read and closes the job.
    	while(!getMoreData())
    	{}

   	    System.out.println("Number of batches created with chunkSize 2000 is " + numberOfBatchesForQueryExtract);
	    System.out.println("Number of total records extracted is " + numberOfRecordsExtracted);
	    System.out.println("Final Total Batch Count is " + finalBatchCount);
	    System.out.println("Job state is " + job.getState());
	    long finishTime = System.nanoTime();
	    long elapsedTime = (finishTime - startTime)/1000000; // nanoseconds -> milliseconds
	    int seconds = (int)(elapsedTime / 1000) % 60 ;
	    int minutes = (int)((elapsedTime / (1000*60)) % 60);
	    int hours = (int)((elapsedTime / (1000*60*60)) % 24);
	    System.out.println("Time for processing is "+ hours + ":" + minutes + ":" + seconds);
    }
    
    public boolean isQueryDone()
    {
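    	// True once every PK-chunked data batch has been consumed; the job is
    	// closed at that point. Batch index 0 (the original query batch) is skipped.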
    	try {
			if(finalBatchCount < (bulkConnection.getBatchInfoList(job.getId()).getBatchInfo().length -1))
			{
				return false;
			}
			else
			{
				bulkConnection.closeJob(job.getId());
				return true;
			}
		} catch (AsyncApiException e) {
			e.printStackTrace();
		}
    	return true;
    }
    
    
    public boolean getMoreData() 
    {
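    	// Process the next PK-chunked data batch using the same polling and
    	// streaming logic as doBulkQuery; return true once isQueryDone() is true.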
    	try{
        if ( isQueryDone() )
        {
            return true;
        }
        else
        {
        	bListInfo = bulkConnection.getBatchInfoList(job.getId()).getBatchInfo();
    	    finalBatchCount++;
  	      	BatchInfo info = bListInfo[finalBatchCount];
  	      	numberOfBatchesForQueryExtract++;
    	    
  	      	String[] queryResults = null;

  	      	for(int i=0; i<10000; i++) 
  	      	{
  	      	    info = bulkConnection.getBatchInfo(job.getId(), info.getId());
    	          
  	      	    if (info.getState() == BatchStateEnum.Completed) 
  	      	    {
  	      	    	QueryResultList list = bulkConnection.getQueryResultList(job.getId(),info.getId());             
  	      	    	queryResults = list.getResult();
  	      	    	break;
  	      	    } 
  	      	    else if (info.getState() == BatchStateEnum.Failed) 
  	      	    {
  	      	    	System.out.println("-------------- failed ----------" + info);    	            
  	      	    	break;
  	      	    } 
  	      	    else 
  	      	    {
  	      	    	System.out.println("-------------- waiting ----------"  + info);  	           
  	      	    }
  	      	 }

  	      	 if (queryResults != null) 
  	      	 {
  	      	    for (String resultId : queryResults) 
  	      	    {
  	      	    	InputStream resultStream = bulkConnection.getQueryResultStream(job.getId(), info.getId(), resultId);
  	      	    	BufferedReader lineReader = new BufferedReader(new InputStreamReader(resultStream,"UTF8"));
  	      	    	String lineString = null;
  	      	    	lineReader.readLine();
  	      	    	while((lineString = lineReader.readLine()) != null) 
  	      	    	{
  	      	    		System.out.println("lineString : " + lineString);
  	      	    		numberOfRecordsExtracted++;
  	      	    	}
  	      	    }
  	      	  } 
  	    return false; 	 
        }
    	} catch (AsyncApiException aae) {
    	  	aae.printStackTrace();
    	} catch (UnsupportedEncodingException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} 
    	return false; 
        }
    
    public String convertInputStreamToString(InputStream inputStream)
	{
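		// Helper (not called above): reads an entire InputStream into a String,
		// preserving line breaks.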
		StringBuilder result = new StringBuilder();
		String line; 
		String newLine = System.getProperty("line.separator");
		boolean flag = false;
		try {
		BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream,"UTF8"));

			while ((line = reader.readLine()) != null) {
			    result.append(flag? newLine: "").append(line);
			    flag = true;
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
		return result.toString();
	}
    
    
    public static int countLines(String filename)  
    {
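        // Helper (not called above): counts the lines in a file by counting
        // newline characters.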
        InputStream is = null;
		try {
			is = new BufferedInputStream(new FileInputStream(filename));
	
            byte[] c = new byte[1024];
            int count = 0;
            int readChars = 0;
            boolean empty = true;
            while ((readChars = is.read(c)) != -1) {
                empty = false;
                for (int i = 0; i < readChars; ++i) {
                    if (c[i] == '\n') {
                        ++count;
                    }
                }
            }
            return (count == 0 && !empty) ? 1 : count;
        } catch (IOException e) {
			e.printStackTrace();
		} finally {
			// Guard against a null stream if the FileInputStream constructor threw
			if (is != null) {
				try {
					is.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
        }
		return 0;
    }
    
}

This source code is compiled and run with force-partner-api-39.0.0.jar, force-wsc-39.0.0.jar, jackson-core-asl-1.9.13.jar and jackson-mapper-asl-1.9.13.jar.
usama mehboob
Hi, did you solve this issue?