View Javadoc

1   package org.kuali.ole.batch.export;
2   
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.kuali.ole.OLEConstants;
import org.kuali.ole.batch.bo.*;
import org.kuali.ole.batch.document.OLEBatchProcessDefinitionDocument;
import org.kuali.ole.batch.impl.AbstractBatchProcess;
import org.kuali.ole.batch.impl.ExportDataServiceImpl;
import org.kuali.ole.batch.marc.OLEMarcReader;
import org.kuali.ole.batch.marc.OLEMarcXmlReader;
import org.kuali.ole.batch.service.ExportDataService;
import org.kuali.ole.docstore.discovery.model.SearchCondition;
import org.kuali.ole.docstore.discovery.model.SearchParams;
import org.kuali.ole.docstore.discovery.service.QueryServiceImpl;
import org.kuali.ole.docstore.discovery.service.SolrServerManager;
import org.kuali.ole.docstore.discovery.solr.work.bib.WorkBibCommonFields;
import org.kuali.ole.docstore.discovery.solr.work.instance.WorkInstanceCommonFields;
import org.kuali.ole.docstore.model.enums.DocFormat;
import org.kuali.ole.docstore.model.enums.DocType;
import org.kuali.rice.krad.service.KRADServiceLocator;
import org.marc4j.MarcStreamWriter;
import org.marc4j.MarcWriter;
import org.marc4j.marc.Record;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystems;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

import static org.kuali.ole.OLEConstants.OLEBatchProcess.lineSeparator;
41  
/**
 * Batch process that exports bibliographic records from the OLE document store.
 * Builds a Solr query from the export profile (full, incremental, exclude-staff-only
 * or filter-criteria based), fetches matching documents in chunks, and writes them
 * to the file system as MARCXML or binary MARC (mrc) files together with an error
 * report for records that failed to convert.
 */
49  public class BatchProcessExportData extends AbstractBatchProcess {
50  
51      private static final Logger LOG = Logger.getLogger(BatchProcessExportData.class);
52  
53      private SolrQuery solrQueryObj;
54      private QueryResponse response;
55      private int start;
56      private Date lastExportDate;
57      private List<String> bibDocList = new ArrayList<String>();
58      protected File filePath;
59      protected String fileName;
60      private int fileCount = 1;
61      private static final String FULL_EXPORT = "(DocType:bibliographic) AND (DocFormat:marc)";
62      private ExportDataService service;
63      private int processedRec;
64      private static final String RANGE = "range";
65      private static final String AND = "AND";
66      private static final String NONE = "none";
67      private StringBuilder errBuilder = new StringBuilder();
68      private String errCnt = "0";
69  
70  
71      /**
72       * The method receives the profile information and performs the solr query to retrive the
73       * solr document list and calls the ExportService to get the export data and writes the
74       * formatted data to the file system
75       *
76       * @return List<String> docList
77       * @throws Exception
78       */
79      private Object[] batchExport(OLEBatchProcessProfileBo profileBo) throws Exception {
80          Object[] resultMap = service.getExportDataBySolr(response.getResults(), profileBo);
81          return resultMap;
82      }
83  
84      /**
85       * Method to export data based on list of bibids
86       *
87       * @param bibIds
88       * @throws Exception
89       */
90      private void batchExport(List<String> bibIds) throws Exception {
91          ExportDataService service = ExportDataServiceImpl.getInstance();
92          service.getExportDataByBibIds(bibIds, processDef.getOleBatchProcessProfileBo());
93      }
94  
95      /**
96       * Gets the filter criteria which is used to create the solr query
97       *
98       * @return solrQuery
99       */
100     private void getSolrQuery() throws Exception {
101         SearchParams params = new SearchParams();
102         solrQueryObj = new SolrQuery();
103         solrQueryObj.setRows(processDef.getChunkSize());
104         solrQueryObj.setFields(WorkBibCommonFields.ID, WorkInstanceCommonFields.BIB_IDENTIFIER,
105                 WorkInstanceCommonFields.INSTANCE_IDENTIFIER, WorkBibCommonFields.DOC_TYPE, WorkBibCommonFields.LOCALID_SEARCH);
106         solrQueryObj.setStart(start);
107         solrQueryObj.setSortField(WorkBibCommonFields.DATE_ENTERED, SolrQuery.ORDER.asc);
108         solrQueryObj.setQuery(getCriteria(params) + QueryServiceImpl.getInstance().buildQuery(params));
109         LOG.info("Solr query for Batch Export profile :: " + processDef.getBatchProcessProfileName() + " :: " + solrQueryObj.getQuery());
110     }
111 
112     /**
113      * adds the filter criteria from the profile to search conditions as field value pair
114      *
115      * @param params
116      */
117     private String getCriteria(SearchParams params) throws ParseException {
118         List<OLEBatchProcessProfileFilterCriteriaBo> criteriaBos = processDef.getOleBatchProcessProfileBo().getOleBatchProcessProfileFilterCriteriaList();
119         if (processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(EXPORT_FULL)) {
120             return FULL_EXPORT;
121         } else if (processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(EXPORT_EX_STAFF)) {
122             SearchCondition condition = getDefaultCondition();
123             condition.setDocField(WorkBibCommonFields.STAFF_ONLY_FLAG);
124             condition.setSearchText(Boolean.FALSE.toString());
125             params.getSearchFieldsList().add(condition);
126             params.setDocFormat(DocFormat.MARC.getDescription());
127             params.setDocType(DocType.BIB.getDescription());
128             return "";
129         } else if (processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(EXPORT_INC)) {
130             if (lastExportDate == null) {
131                 processDef.getOleBatchProcessProfileBo().setExportScope(EXPORT_FULL);
132                 return FULL_EXPORT;
133             }
134 
135             SimpleDateFormat format = new SimpleDateFormat(SOLR_DT_FORMAT);
136             String fromDate = format.format(lastExportDate);
137             SearchCondition condition = getDefaultCondition();
138             condition.setDocField(WorkBibCommonFields.DATE_UPDATED);
139             condition.setSearchText("[" + fromDate + " TO NOW]");
140             condition.setSearchScope(NONE);
141             condition.setFieldType(RANGE);
142             params.getSearchFieldsList().add(condition);
143             if (StringUtils.isNotEmpty(processDef.getBatchProcessProfileBo().getDataToExport()) && processDef.getBatchProcessProfileBo().getDataToExport().equalsIgnoreCase(EXPORT_BIB_ONLY)) {
144                 params.setDocFormat(DocFormat.MARC.getDescription());
145                 params.setDocType(DocType.BIB.getDescription());
146             }
147             return "";
148         } else {
149             for (OLEBatchProcessProfileFilterCriteriaBo bo : criteriaBos) {
150                 SearchCondition condition = getDefaultCondition();
151                 condition.setDocField(bo.getFilterFieldName());
152                 if (StringUtils.isNotEmpty(bo.getFilterFieldValue())) { // one value
153                     Map<String, String> filterMap = new HashMap<String, String>();
154                     filterMap.put("ole_bat_field_nm", bo.getFilterFieldName());
155                     Collection<OLEBatchProcessFilterCriteriaBo> filterBo = KRADServiceLocator.getBusinessObjectService().findMatching(OLEBatchProcessFilterCriteriaBo.class, filterMap);
156                     if (!filterBo.iterator().hasNext()) return "";
157                     if (filterBo.iterator().next().getFieldType().equalsIgnoreCase("date")) {
158                         condition.setSearchText("[" + getSolrDate(bo.getFilterFieldValue(), true) + " TO " + getSolrDate(bo.getFilterFieldValue(), false) + "]");
159                         condition.setSearchScope(NONE);
160                         condition.setFieldType(RANGE);
161                     } else {
162                         condition.setSearchText(bo.getFilterFieldValue());
163                     }
164                 } else if (StringUtils.isNotEmpty(bo.getFilterRangeFrom()) && StringUtils.isNotEmpty(bo.getFilterRangeTo())) {
165                     // range values
166                     condition.setFieldType(RANGE);
167                     condition.setSearchScope(NONE);
168                     Map<String, String> filterMap = new HashMap<String, String>();
169                     filterMap.put("ole_bat_field_nm", bo.getFilterFieldName());
170                     Collection<OLEBatchProcessFilterCriteriaBo> filterBo = KRADServiceLocator.getBusinessObjectService().findMatching(OLEBatchProcessFilterCriteriaBo.class, filterMap);
171                     if (!filterBo.iterator().hasNext()) return "";
172                     if (filterBo.iterator().next().getFieldType().equalsIgnoreCase("date")) {
173                         condition.setSearchText("[" + getSolrDate(bo.getFilterRangeFrom(), true) + " TO " + getSolrDate(bo.getFilterRangeTo(), false) + "]");
174                     } else {
175                         condition.setSearchText("[" + bo.getFilterRangeFrom() + " TO " + bo.getFilterRangeTo() + "]");
176                     }
177                 } else if (StringUtils.isNotEmpty(bo.getFilterRangeFrom()) && StringUtils.isEmpty(bo.getFilterRangeTo())) {   // range values
178                     condition.setFieldType(RANGE);
179                     condition.setSearchScope(NONE);
180                     Map<String, String> filterMap = new HashMap<String, String>();
181                     filterMap.put("ole_bat_field_nm", bo.getFilterFieldName());
182                     Collection<OLEBatchProcessFilterCriteriaBo> filterBo = KRADServiceLocator.getBusinessObjectService().findMatching(OLEBatchProcessFilterCriteriaBo.class, filterMap);
183                     if (!filterBo.iterator().hasNext()) return "";
184                     if (filterBo.iterator().next().getFieldType().equalsIgnoreCase("date")) {
185                         condition.setSearchText("[" + getSolrDate(bo.getFilterRangeFrom(), true) + " TO NOW]");
186                     } else {
187                         condition.setSearchText("[" + bo.getFilterRangeFrom() + " TO *]");
188                     }
189                 }
190                 //to check if bib status or local id is present in the filter criteria, then select only the bib records by setting export type as full
191                 if (bo.getFilterFieldName().equalsIgnoreCase(WorkBibCommonFields.LOCALID_SEARCH) || bo.getFilterFieldName().equalsIgnoreCase(WorkBibCommonFields.STATUS_SEARCH)
192                         || (StringUtils.isNotEmpty(processDef.getBatchProcessProfileBo().getDataToExport()) && processDef.getBatchProcessProfileBo().getDataToExport().equalsIgnoreCase(EXPORT_BIB_ONLY))) {
193                     processDef.getOleBatchProcessProfileBo().setExportScope(EXPORT_FULL);
194                     params.setDocFormat(DocFormat.MARC.getDescription());
195                     params.setDocType(DocType.BIB.getDescription());
196                 }
197 
198                 params.getSearchFieldsList().add(condition);
199             }
200             return "";
201         }
202 
203     }
204 
205     @Override
206     protected void prepareForRead() throws Exception {
207         try {
208             OLEBatchProcessProfileBo profileBo = processDef.getBatchProcessProfileBo();
209             profileBo.setFileType(processDef.getOutputFormat());
210             if (job.getStatus().equals(OLEConstants.OLEBatchProcess.JOB_STATUS_PAUSED)) {
211                 start = start + Integer.valueOf(job.getNoOfRecordsProcessed());
212             }
213             performSolrQuery();
214             Object[] resultMap = null;
215             if (response.getResults().getNumFound() > 0) {
216                 service = ExportDataServiceImpl.getInstance();
217                 resultMap = batchExport(profileBo);
218             }
219             if (resultMap == null || resultMap[0].equals("0")) {
220                 job.setStatus(OLEConstants.OLEBatchProcess.JOB_STATUS_COMPLETED);
221             } else {
222                 //bibDocList.add(XML_DEC);
223                 bibDocList.addAll((List<String>) resultMap[1]);
224                 processedRec = Integer.valueOf(resultMap[0].toString());
225                 errBuilder.append(resultMap[2]);
226                 errCnt = resultMap[3].toString();
227             }
228         } catch (Exception ex) {
229             LOG.error("Error while processing data :::", ex);
230             job.setStatus(OLEConstants.OLEBatchProcess.JOB_STATUS_STOPPED);
231             throw ex;
232         }
233     }
234 
235     /**
236      * gets the next batch of records for export
237      *
238      * @throws Exception
239      */
240     @Override
241     protected void getNextBatch() throws Exception {
242         try {
243             bibDocList.clear();
244             start += processDef.getChunkSize();
245             performSolrQuery();
246             Object[] resultMap = null;
247             OLEBatchProcessProfileBo profileBo = processDef.getBatchProcessProfileBo();
248             profileBo.setFileType(processDef.getOutputFormat());
249             if (response.getResults().getNumFound() > 0) resultMap = batchExport(profileBo);
250             if (resultMap == null || resultMap[0].equals("0")) {
251                 if (start < response.getResults().getNumFound()) {
252                     getNextBatch();
253                 }
254                 //get the deleted records and write to a text file
255                 job.setStatus(OLEConstants.OLEBatchProcess.JOB_STATUS_COMPLETED);
256             } else {
257                 fileCount++;
258                 fileName = processDef.getBatchProcessProfileName() + "_" + fileCount;
259                 //bibDocList.add(XML_DEC);
260                 bibDocList.addAll((List<String>) resultMap[1]);
261                 processedRec = Integer.valueOf(resultMap[0].toString());
262                 errBuilder.append(resultMap[2]);
263                 errCnt = resultMap[3].toString();
264             }
265         } catch (Exception ex) {
266             LOG.error("Error while getNextBatch operation", ex);
267             job.setStatus(OLEConstants.OLEBatchProcess.JOB_STATUS_STOPPED);
268             throw ex;
269         }
270 
271     }
272 
273     /**
274      * methods creates the directories to write file to if they do not exist
275      * if the user provided folder is not valid (cannot be created) then the default location is chosen
276      *
277      * @throws Exception
278      */
279     @Override
280     protected void prepareForWrite() throws Exception {
281         try {
282             fileName = processDef.getBatchProcessProfileName();
283             if (response.getResults().getNumFound() > processDef.getChunkSize()) {
284                 fileCount += (start / processDef.getChunkSize());
285                 fileName = processDef.getBatchProcessProfileName() + "_" + fileCount;
286             }
287             String homeDirectory = getBatchProcessFilePath(processDef.getBatchProcessType());
288             if (StringUtils.isNotEmpty(processDef.getDestinationDirectoryPath())) {
289                 filePath = new File(processDef.getDestinationDirectoryPath() + FileSystems.getDefault().getSeparator() + job.getJobId());
290             } else if (filePath == null || !filePath.isDirectory()) {
291                 filePath = new File(homeDirectory + FileSystems.getDefault().getSeparator() + job.getJobId());
292             }
293             if (filePath.isDirectory()) {
294                 //in case of paused and stopped status of job
295                 //job has already run and directory exists
296                 LOG.info("filePath :: " + filePath.getPath() + " ::already exists");
297             } else {
298                 if (filePath.mkdirs()) {
299                     // able to create directory for the given file path
300                     LOG.info("user given filePath :: " + filePath.getPath() + " ::created successfully");
301                 } else {
302                     filePath = new File(homeDirectory + FileSystems.getDefault().getSeparator() + job.getJobId());
303                     filePath.mkdirs();
304                     LOG.info("default filePath :: " + filePath.getPath() + " ::created");
305                 }
306             }
307             job.setUploadFileName(filePath.getPath());
308         } catch (Exception ex) {
309             LOG.error("Error while prepareForWrite operation", ex);
310             job.setStatus(OLEConstants.OLEBatchProcess.JOB_STATUS_STOPPED);
311             throw ex;
312         }
313 
314     }
315 
316     /**
317      * Performs the batch write operation
318      * Writes the data to marcxml or mrc format based on the output format specified in the process
319      *
320      * @throws Exception
321      */
322     @Override
323     protected void processBatch() throws Exception {
324         int currSuccessRec = 0;
325         int successRec = Integer.valueOf(job.getNoOfSuccessRecords());
326         int recordProcessed = Integer.valueOf(job.getNoOfRecordsProcessed());
327         int errRecords = Integer.valueOf(job.getNoOfFailureRecords());
328         int currErrCnt = Integer.valueOf(errCnt);
329         if (processDef.getOutputFormat().equalsIgnoreCase(MARCXML)) {
330             try {
331                 if(processedRec>0)
332                     writeFileToLocation();
333                 currSuccessRec = processedRec;
334             } catch (Exception e) {
335                 LOG.error("Error while writing to file:: marcxml ::", e);
336                 job.setStatus(OLEConstants.OLEBatchProcess.JOB_STATUS_STOPPED);
337                 job.setStatusDesc("Error while writing to marcxml file::" + fileName + EXT_MARCXML);
338                 currSuccessRec = 0;
339                 currErrCnt += processedRec;
340             }
341         } else if (processDef.getOutputFormat().equalsIgnoreCase(MARC)) {
342             try {
343                 currSuccessRec = generateMarcFromXml();
344                 currErrCnt += processedRec - currSuccessRec;
345             } catch (Exception e) {
346                 LOG.error("Error while writing to file:: mrc ::", e);
347                 job.setStatus(OLEConstants.OLEBatchProcess.JOB_STATUS_STOPPED);
348                 job.setStatusDesc("Error while writing to mrc file::" + fileName + EXT_MARC);
349             }
350         }
351         try {
352             writeErrorFile();
353         } catch (Exception ex) {
354             LOG.error("Error while writing to error file", ex);
355             job.setStatus(OLEConstants.OLEBatchProcess.JOB_STATUS_STOPPED);
356         }
357 
358         job.setNoOfRecordsProcessed(String.valueOf(recordProcessed + currSuccessRec + currErrCnt));
359         job.setNoOfFailureRecords(String.valueOf(errRecords + currErrCnt));
360         job.setNoOfSuccessRecords(String.valueOf(successRec + currSuccessRec));
361         LOG.info(job.getNoOfRecordsProcessed() + " ::records processed");
362         if (currErrCnt > 0)
363             LOG.info(job.getNoOfFailureRecords() + " ::records failed");
364     }
365 
366     /**
367      * Write the content read to mrcxml file
368      *
369      * @throws Exception
370      */
371     private void writeFileToLocation() throws Exception {
372         File file = new File(filePath + FileSystems.getDefault().getSeparator() + fileName + EXT_MARCXML);
373         FileUtils.writeLines(file, "UTF-8", bibDocList);
374     }
375 
376     /**
377      * Writes the content read into a mrc file
378      *
379      * @throws Exception
380      */
381     private int generateMarcFromXml() throws Exception {
382         int successRec = 0;
383         File fileToWrite = new File(filePath + FileSystems.getDefault().getSeparator() + fileName + EXT_MARC);
384         FileOutputStream fileOutputStream = new FileOutputStream(fileToWrite);
385         //String bibContent = StringUtils.join(bibDocList, "");
386         if (!fileToWrite.exists()) {
387             fileToWrite.getParentFile().mkdirs();
388             fileToWrite.createNewFile();
389         }
390         MarcWriter writer = new MarcStreamWriter(fileOutputStream);
391         for(String bibContent : bibDocList){
392             InputStream input = new ByteArrayInputStream(bibContent.getBytes());
393             Record record = null;
394             OLEMarcReader marcXmlReader = new OLEMarcXmlReader(input);
395             try {
396                 while (marcXmlReader.hasNext()) {
397                     if(marcXmlReader.hasErrors()){
398                         marcXmlReader.next();
399                         errBuilder.append(marcXmlReader.getError().toString()).append(lineSeparator);
400                         marcXmlReader.clearErrors();
401                         continue;
402                     }
403                     record = marcXmlReader.next();
404                     writer.write(record);
405                     successRec++;
406                 }
407 
408             } catch (Exception ex) {
409                 LOG.error("Error while parsing MARCXML to mrc data:: " + (record == null ? "NULL_RECORD" : "record id:: " + record.getId()), ex);
410                /* errBuilder.append(ERR_BIB).append(record == null ? "ERROR_RECORD" : record.getId()).append(TIME_STAMP)
411                         .append(new Date()).append(ERR_CAUSE).append(ex.getMessage()).append(" ::At:: ").append("generateMarcFromXml()").append(lineSeparator);*/
412             }
413         }
414         writer.close();
415         return successRec;
416     }
417 
418     /**
419      * Converts the given date string to solr date format
420      * // convert the format to yyyy-MM-dd'T'HH:mm:ss'Z'
421      *
422      * @param dateStr
423      * @param isFrom
424      * @return
425      */
426     private String getSolrDate(String dateStr, boolean isFrom) throws ParseException {
427         SimpleDateFormat solrDtFormat = new SimpleDateFormat(SOLR_DT_FORMAT);
428         SimpleDateFormat userFormat = new SimpleDateFormat(FILTER_DT_FORMAT);
429         try {
430             if (isFrom) {
431                 Date date = userFormat.parse(dateStr);
432                 Calendar cal = Calendar.getInstance();
433                 cal.setTime(date);
434                 cal.set(cal.get(Calendar.YEAR), cal.get(Calendar.MONTH), cal.get(Calendar.DATE), 0, 0, 0);
435                 return solrDtFormat.format(cal.getTime());
436             } else {
437                 Date date = userFormat.parse(dateStr);
438                 Calendar cal = Calendar.getInstance();
439                 cal = Calendar.getInstance();
440                 cal.setTime(date);
441                 cal.set(cal.get(Calendar.YEAR), cal.get(Calendar.MONTH), cal.get(Calendar.DATE), 23, 59, 59);
442                 return solrDtFormat.format(cal.getTime());
443             }
444         } catch (ParseException e) {
445             LOG.error("Error while parsing user entered date::" + dateStr, e);
446             throw e;
447         }
448     }
449 
450     /**
451      * loads the profile and checks for incremental export
452      *
453      * @param processDef
454      */
455     @Override
456     protected void loadProfile(OLEBatchProcessDefinitionDocument processDef) throws Exception {
457         super.loadProfile(processDef);
458         List<OLEBatchProcessProfileMappingOptionsBo> optionsBoList = processDef.getOleBatchProcessProfileBo().getOleBatchProcessProfileMappingOptionsList();
459         for (OLEBatchProcessProfileMappingOptionsBo bo : optionsBoList) {
460             processDef.getOleBatchProcessProfileBo().getOleBatchProcessProfileDataMappingOptionsBoList().addAll(bo.getOleBatchProcessProfileDataMappingOptionsBoList());
461         }
462         try {
463             if (processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(EXPORT_INC)) {
464                 String jobId_FL = "start_time";
465                 Map<String, String> jobBoMap = new HashMap<String, String>();
466                 jobBoMap.put("bat_prfle_nm", processDef.getBatchProcessProfileName());
467                 jobBoMap.put("status", OLEConstants.OLEBatchProcess.JOB_STATUS_COMPLETED);
468                 List<OLEBatchProcessJobDetailsBo> jobDetailsBos = (List<OLEBatchProcessJobDetailsBo>) KRADServiceLocator.getBusinessObjectService().findMatchingOrderBy(OLEBatchProcessJobDetailsBo.class, jobBoMap, jobId_FL, true);
469                 Calendar cal = Calendar.getInstance();
470                 if (jobDetailsBos == null) {
471                     lastExportDate = null;
472                     return;
473                 }
474                 switch (jobDetailsBos.size()) {
475                     case 0:
476                         lastExportDate = null;
477                         break;
478                     case 1:
479                     default:
480                         cal.setTime(jobDetailsBos.get(jobDetailsBos.size() - 1).getStartTime());
481                         lastExportDate = getUTCTime(cal.getTime());
482                 }
483 
484                 LOG.info("Incremental export running for batch export profile :: " + processDef.getBatchProcessProfileName() + " :: with date ::" + lastExportDate);
485             }
486         } catch (Exception ex) {
487             LOG.error("Error while retrieving job details for incremental export::");
488             throw ex;
489         }
490     }
491 
492     /**
493      * gets the solr query based on the filter criteria and executed to retrive the results
494      *
495      * @throws Exception
496      */
497     private void performSolrQuery() throws Exception {
498         try {
499             if (solrQueryObj == null) {
500                 getSolrQuery();
501                 if (!processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(EXPORT_FULL)) {
502                     solrQueryObj.setFacet(true);
503                     solrQueryObj.setFacetMinCount(1);
504                     solrQueryObj.addFacetField(WorkInstanceCommonFields.BIB_IDENTIFIER);
505                     response = SolrServerManager.getInstance().getSolrServer().query(solrQueryObj);
506                     FacetField facetFieldBib = response.getFacetField(WorkInstanceCommonFields.BIB_IDENTIFIER);
507                     if (facetFieldBib.getValues() == null) {
508                         if (response.getResults().getNumFound() == 0) {
509                             job.setTotalNoOfRecords("0");
510                         } else {
511                             job.setTotalNoOfRecords(String.valueOf(response.getResults().getNumFound()));
512                         }
513                         return;
514                     }
515                     List<FacetField.Count> count = facetFieldBib.getValues();
516                     long bibCount = 0;
517                     for (FacetField.Count c : count) {
518                         bibCount += c.getCount();
519                     }
520                     job.setTotalNoOfRecords(String.valueOf(count.size()));
521                 } else {
522                     response = SolrServerManager.getInstance().getSolrServer().query(solrQueryObj);
523                     job.setTotalNoOfRecords(String.valueOf(response.getResults().getNumFound()));
524                 }
525                 LOG.info("Total number of records to be exported :: " + job.getTotalNoOfRecords());
526 
527             } else {
528                 solrQueryObj.setStart(start);
529                 solrQueryObj.setFacet(false);
530                 response = SolrServerManager.getInstance().getSolrServer().query(solrQueryObj);
531             }
532 
533 
534         } catch (Exception e) {
535             LOG.error("Error while performing solr query :: ", e);
536             throw e;
537         }
538     }
539 
540     /**
541      * converts the given date to UTC time
542      *
543      * @param date
544      * @return
545      * @throws ParseException
546      */
547     private static Date getUTCTime(Date date) throws ParseException {
548         DateFormat format = new SimpleDateFormat(SOLR_DT_FORMAT);
549         format.setTimeZone(TimeZone.getTimeZone("UTC"));
550         String utcStr = format.format(date);
551         DateFormat format2 = new SimpleDateFormat(SOLR_DT_FORMAT);
552         Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
553         cal.setTime(format2.parse(utcStr));
554         return cal.getTime();
555     }
556 
557     /**
558      * gets the default search condition
559      *
560      * @return
561      */
562     private SearchCondition getDefaultCondition() {
563         SearchCondition condition = new SearchCondition();
564         condition.setSearchScope(AND);
565         condition.setOperator(AND);
566         return condition;
567     }
568 
569     /**
570      * method to write deleted records to a file
571      *
572      * @param deletedRecords
573      */
574     private void writeDeletedRecords(List<String> deletedRecords) {
575         //read deleted records from the delete audit table
576         // get the file path
577         String deleteFileName = processDef.getBatchProcessProfileName() + "_deleted.txt";
578         File fileToWrite = new File(filePath + FileSystems.getDefault().getSeparator() + deleteFileName);
579         // write to the file the data
580     }
581 
582     /**
583      * Writes the error records to the path
584      *
585      * @throws Exception
586      */
587     private void writeErrorFile() throws Exception {
588         if (StringUtils.isNotEmpty(errBuilder.toString())) {
589             File file = new File(filePath + FileSystems.getDefault().getSeparator() + processDef.getBatchProcessProfileName() + EXT_ERR_TXT);
590             FileUtils.writeStringToFile(file, errBuilder.toString(), "UTF-8", true);
591         }
592     }
593 }