1   package org.kuali.ole.batch.export;
2   
3   import org.apache.commons.collections.CollectionUtils;
4   import org.apache.commons.io.FileUtils;
5   import org.apache.commons.lang.StringUtils;
6   import org.apache.commons.lang.time.StopWatch;
7   import org.apache.log4j.Logger;
8   import org.kuali.ole.DocumentUniqueIDPrefix;
9   import org.kuali.ole.OLEConstants;
10  import org.kuali.ole.batch.bo.*;
11  import org.kuali.ole.batch.document.OLEBatchProcessDefinitionDocument;
12  import org.kuali.ole.batch.helper.InstanceMappingHelper;
13  import org.kuali.ole.batch.impl.*;
14  import org.kuali.ole.batch.marc.OLEMarcReader;
15  import org.kuali.ole.batch.marc.OLEMarcXmlReader;
16  import org.kuali.ole.batch.service.ExportDataService;
17  import org.kuali.ole.docstore.common.client.DocstoreClientLocator;
18  import org.kuali.ole.docstore.common.document.*;
19  import org.kuali.ole.docstore.common.document.content.enums.DocFormat;
20  import org.kuali.ole.docstore.common.search.*;
21  import org.kuali.ole.docstore.common.util.BatchBibTreeDBUtil;
22  import org.kuali.ole.docstore.model.enums.DocType;
23  import org.kuali.ole.sys.context.SpringContext;
24  import org.kuali.rice.core.api.config.property.ConfigContext;
25  import org.kuali.rice.krad.service.KRADServiceLocator;
26  import org.marc4j.MarcStreamWriter;
27  import org.marc4j.MarcWriter;
28  import org.marc4j.marc.ControlField;
29  import org.marc4j.marc.Record;
30  import org.marc4j.marc.VariableField;
31  
32  import java.io.ByteArrayInputStream;
33  import java.io.File;
34  import java.io.FileOutputStream;
35  import java.io.InputStream;
36  import java.nio.file.FileSystems;
37  import java.text.DateFormat;
38  import java.text.ParseException;
39  import java.text.SimpleDateFormat;
40  import java.util.*;
41  import java.util.Date;
42  
43  import static org.kuali.ole.OLEConstants.OLEBatchProcess.*;
44  
45  /**
46   * Created with IntelliJ IDEA.
47   * User: meenrajd
48   * Date: 7/5/13
49   * Time: 5:34 PM
50   * <p/>
51   * OLE batch export process: performs the export for the given profile and writes the exported data to a
52   * MARCXML or MRC file, based on the output format in the process definition.
53   */
54  public class BatchProcessExportData extends AbstractBatchProcess {
55  
56      private static final Logger LOG = Logger.getLogger(BatchProcessExportData.class);
57  
58      private int start;
59      private Date lastExportDate;
60      private List<String> bibDocList = new ArrayList<String>();
61      private File filePath;
62      private String fileName;
63      private int fileCount = 1;
64      private static final String FULL_EXPORT = "(DocType:bibliographic) AND (DocFormat:marc)";
65      private ExportDataService service;
66      private int processedRec;
67      private static final String RANGE = "range";
68      private static final String AND = "AND";
69      private static final String NONE = "none";
70      private static final String PHRASE = "phrase";
71      private static final String OR = "OR";
72      private StringBuilder errBuilder;
73      private String errCnt = "0";
74      private static final String applicationUrl = ConfigContext.getCurrentContextConfig().getProperty(OLEConstants.OLEBatchProcess.BATCH_EXPORT_PATH_APP_URL);
75      private static final String homeDirectory = ConfigContext.getCurrentContextConfig().getProperty(OLEConstants.USER_HOME_DIRECTORY);
76      private SearchParams searchParams;
77      private SearchResponse response;
78      private OLEBatchProcessProfileFilterCriteriaBo staffOnlyCriteriaBo = null;
79      private DocstoreClientLocator docstoreClientLocator;
80      private SimpleDateFormat dateTimeFormat=new SimpleDateFormat("yyyy-MM-dd'T'HHmm");
81  
82  
83  
84  
85  
86      private int totalRecordsExported;
87  
88      public DocstoreClientLocator getDocstoreClientLocator() {
89          if (null == docstoreClientLocator) {
90              return SpringContext.getBean(DocstoreClientLocator.class);
91          }
92          return docstoreClientLocator;
93      }
94  
95  
96      /**
97       * Calls the ExportDataService with the current Solr search results and the given profile to produce
98       * the export data for this batch.
99       *
100      * @return Object[] with [0] the processed record count, [1] the List<String> of exported records,
101      *         [2] error details and [3] the error count
102      * @throws Exception
103      */
104     public Object[] batchExport(OLEBatchProcessProfileBo profileBo) throws Exception {
105         return service.getExportDataBySolr(response.getSearchResults(), profileBo);
106     }
107 
108 
109     /**
110      * Builds the search params (paging, result fields and sort order) for the Solr query and applies
111      * the filter criteria from the profile.
112      * @throws Exception
113      */
114     public void getSolrQuery() throws Exception {
115         searchParams = new SearchParams();
116         searchParams.setStartIndex(start);
117         searchParams.setPageSize(this.processDef.getChunkSize());
118         searchParams.getSearchResultFields().addAll(getSearchResultFields());
119         SortCondition sortCondition = new SortCondition();
120         sortCondition.setSortField("dateEntered");
121         sortCondition.setSortOrder("asc");
122         searchParams.getSortConditions().add(sortCondition);
123         getCriteria();
124     }
125 
126     private List<SearchResultField> getSearchResultFields() {
127         List<SearchResultField> searchResultFields = new ArrayList<>();
128         List<String> resultFields = new ArrayList<>();
129         //resultFields.add("id");
130         resultFields.add("bibIdentifier");
131         resultFields.add("holdingsIdentifier");
132         resultFields.add("DocType");
133         resultFields.add("LocalId_display");
134         for (String resultField : resultFields) {
135             SearchResultField searchResultField = new SearchResultField();
136             searchResultField.setDocType("bibliographic");
137             searchResultField.setFieldName(resultField);
138             searchResultFields.add(searchResultField);
139         }
140         return searchResultFields;
141     }
142 
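    /**
     * Builds and executes the Solr query for bibliographic records deleted since the last export date
     * (doc type "bibliographic_delete"), returning their local ids ordered by the date they were entered.
     */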
143     public SearchResponse getDeleteSolrQuery() throws Exception {
144         searchParams = new SearchParams();
145         searchParams.setStartIndex(start);
146         searchParams.setPageSize(processDef.getChunkSize());
147         SearchResultField searchResultField = new SearchResultField();
148         searchResultField.setDocType("bibliographic_delete");
149         searchResultField.setFieldName("LocalId_display");
150         searchParams.getSearchResultFields().add(searchResultField);
151         SortCondition sortCondition = new SortCondition();
152         sortCondition.setSortField("dateEntered");
153         sortCondition.setSortOrder("asc");
154         searchParams.getSortConditions().add(sortCondition);
155         getDeleteCriteria();
156         response = getDocstoreClientLocator().getDocstoreClient().search(searchParams);
157         return response;
158     }
159 
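    /**
     * Builds and executes the Solr query for bibs updated since the last export date that are flagged
     * staff-only; the incremental "except staff only" export uses the result to report those bibs as deletes.
     */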
160     public SearchResponse getIncrementalSolrQuery() throws Exception {
161         SimpleDateFormat format = new SimpleDateFormat(SOLR_DT_FORMAT);
162         String fromDate = format.format(lastExportDate);
163         searchParams = new SearchParams();
164         SearchField searchField = searchParams.buildSearchField(null, "dateUpdated", "[" + fromDate + " TO NOW]");
165         searchParams.getSearchConditions().add(searchParams.buildSearchCondition(NONE, searchField, "AND"));
166         searchParams.getSearchResultFields().add(searchParams.buildSearchResultField(null, "bibIdentifier"));
167         searchField = searchParams.buildSearchField(null, "staffOnlyFlag", Boolean.TRUE.toString());
168         searchParams.getSearchConditions().add(searchParams.buildSearchCondition(NONE, searchField, "AND"));
169         response = getDocstoreClientLocator().getDocstoreClient().search(searchParams);
170         return response;
171     }
172 
173     /**
174      * Adds the filter criteria from the profile to the search conditions as field/value pairs.
175      */
176     private String getCriteria() throws ParseException {
177         List<OLEBatchProcessProfileFilterCriteriaBo> criteriaBos = processDef.getOleBatchProcessProfileBo().getOleBatchProcessProfileFilterCriteriaList();
178         if (processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(EXPORT_INC)
179                 || processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(INCREMENTAL_EXPORT_EX_STAFF)) {
180             SearchCondition condition = getDefaultCondition();
181             SearchField searchField = new SearchField();
182             SearchCondition conditionStaffOnly = getDefaultCondition();
183             SearchField searchFieldStaffOnly = new SearchField();
184 
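            // incremental export: restrict results to records updated between the last export date and now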
185             SimpleDateFormat format = new SimpleDateFormat(SOLR_DT_FORMAT);
186             String fromDate = format.format(lastExportDate);
187             if (StringUtils.isNotEmpty(processDef.getBatchProcessProfileBo().getDataToExport())
188                     && processDef.getBatchProcessProfileBo().getDataToExport().equalsIgnoreCase(EXPORT_BIB_ONLY)) {
189                 searchFieldStaffOnly.setDocType(DocType.BIB.getDescription());
190                 searchField.setDocType(DocType.BIB.getDescription());
191             }
192             searchField.setFieldName("dateUpdated");
193             searchField.setFieldValue("[" + fromDate + " TO NOW]");
194             condition.setSearchScope(NONE);
195             condition.setSearchField(searchField);
196             searchParams.getSearchConditions().add(condition);
197 
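            // when staff-only records are excluded, also require staffOnlyFlag = false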
198             if (processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(INCREMENTAL_EXPORT_EX_STAFF)) {
199                 searchFieldStaffOnly.setFieldName("staffOnlyFlag");
200                 searchFieldStaffOnly.setFieldValue(Boolean.FALSE.toString());
201                 conditionStaffOnly.setSearchScope(NONE);
202                 conditionStaffOnly.setSearchField(searchFieldStaffOnly);
203                 searchParams.getSearchConditions().add(conditionStaffOnly);
204             }
205             return "";
206         } else if (processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(EXPORT_FILTER)) {
207             return getFilterCriteria(criteriaBos);
208         } else {
209             return "";
210         }
211     }
212 
213     private String getDeleteCriteria() throws ParseException {
214         SimpleDateFormat format = new SimpleDateFormat(SOLR_DT_FORMAT);
215         String fromDate = format.format(lastExportDate);
216         SearchCondition condition = getDefaultCondition();
217         SearchField searchField = new SearchField();
218         searchField.setDocType("bibliographic_delete");
219         searchField.setFieldName("dateUpdated");
220         searchField.setFieldValue("[" + fromDate + " TO NOW]");
221         condition.setSearchScope(NONE);
222         //condition.setOperator(RANGE);
223         condition.setSearchField(searchField);
224         searchParams.getSearchConditions().add(condition);
225         return "";
226     }
227 
228     @Override
229     protected void prepareForRead() throws Exception {
230         try {
231             errBuilder = new StringBuilder();
232             OLEBatchProcessProfileBo profileBo = processDef.getBatchProcessProfileBo();
233             profileBo.setFileType(processDef.getOutputFormat());
234             if (job.getStatus().equals(OLEConstants.OLEBatchProcess.JOB_STATUS_PAUSED)) {
235                 start = start + Integer.valueOf(job.getNoOfRecordsProcessed());
236             }
237             performSolrQuery();
238 
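            // Full and "export except staff only" scopes stream bib trees directly from the database;
            // all other scopes (incremental and filter-based) are handled through the Solr-based export.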
239             if(!profileBo.getExportScope().equalsIgnoreCase(EXPORT_FULL) && !profileBo.getExportScope().equalsIgnoreCase(EXPORT_EX_STAFF)) {
240                 updateJobProgress();
241                 incrementalFilterExport(profileBo);
242             } else{
243                 batchProcessExportFetch();
244                 if(!job.getStatus().equalsIgnoreCase(OLEConstants.OLEBatchProcess.JOB_STATUS_CANCELLED)
245                         && !job.getStatus().equalsIgnoreCase(OLEConstants.OLEBatchProcess.JOB_STATUS_PAUSED)
246                         && !job.getStatus().equalsIgnoreCase(OLEConstants.OLEBatchProcess.JOB_STATUS_STOPPED)){
247                     job.setStatus(OLEConstants.OLEBatchProcess.JOB_STATUS_COMPLETED);
248                 }
249                 updateJobProgress();
250             }
251         } catch (Exception ex) {
252             LOG.error("Error while processing data :::", ex);
253             job.setStatus(OLEConstants.OLEBatchProcess.JOB_STATUS_STOPPED);
254             throw ex;
255         }
256     }
257 
258     /**
259      * gets the next batch of records for export
260      *
261      * @throws Exception
262      */
263     @Override
264     protected void getNextBatch() throws Exception {
265         try {
266             errBuilder = new StringBuilder();
267             bibDocList.clear();
268             start += processDef.getChunkSize();
269             if (start > response.getTotalRecordCount()) {//no more next batch
270                 job.setStatus(JOB_STATUS_COMPLETED);
271                 return;
272             }
273             performSolrQuery();
274             Object[] resultMap = null;
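            // resultMap layout from the export service: [0] processed record count, [1] List<String> of
            // exported records, [2] error details, [3] error count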
275             OLEBatchProcessProfileBo profileBo = processDef.getBatchProcessProfileBo();
276             profileBo.setFileType(processDef.getOutputFormat());
277             if (response.getSearchResults().size() > 0) resultMap = batchExport(profileBo);
278             if (resultMap == null || resultMap[0].equals("0")) {
279                 if (start < response.getTotalRecordCount()) {
280                     getNextBatch();
281                 }
282                 job.setStatus(JOB_STATUS_COMPLETED);
283             } else {
284                 fileCount++;
285                 if(StringUtils.isNotBlank(processDef.getDestinationDirectoryPath()))
286                     fileName = processDef.getDestinationDirectoryPath() + "-" + dateTimeFormat.format(new Date()) + "-" + OLEConstants.OLEBatchProcess.PART  + fileCount;
287                 else if(StringUtils.isNotBlank(processDef.getBatchProcessName()))
288                     fileName = processDef.getBatchProcessName() + "-" + dateTimeFormat.format(new Date()) + "-" + OLEConstants.OLEBatchProcess.PART  + fileCount;
289                 else
290                     fileName = job.getJobId() + "-" + dateTimeFormat.format(new Date()) + "-" + OLEConstants.OLEBatchProcess.PART  + fileCount;
291                 //fileName = processDef.getBatchProcessProfileName() + "_" + fileCount;
292                 //bibDocList.add(XML_DEC);
293                 bibDocList.addAll((List<String>) resultMap[1]);
294                 processedRec = Integer.valueOf(resultMap[0].toString());
295             }
296             if (!job.getStatus().equals(JOB_STATUS_RUNNING) && errBuilder.length() != 0) {
297                 job.setStatusDesc("Batch Completed with Errors :: See Error file for details");
298             }
299             if (resultMap != null) {
300                 if (resultMap[2] != null)
301                     errBuilder.append(resultMap[2].toString());
302                 if (resultMap[3] != null)
303                     errCnt = resultMap[3].toString();
304             }
305         } catch (Exception ex) {
306             LOG.error("Error while getNextBatch operation", ex);
307             job.setStatus(JOB_STATUS_STOPPED);
308             throw ex;
309         }
310 
311     }
312 
313     /**
314      * Creates the directories to write the file to if they do not exist.
315      * If the user-provided folder is not valid (cannot be created), the default location is used instead.
316      *
317      * @throws Exception
318      */
319     @Override
320     protected void prepareForWrite() throws Exception {
321         dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
322         try {
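            // Output file name: destination directory path, else batch process name, else job id, suffixed
            // with a UTC timestamp (and a part number when the export spans multiple chunk-sized files).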
323             if(StringUtils.isNotBlank(processDef.getDestinationDirectoryPath()))
324                 fileName = processDef.getDestinationDirectoryPath()+ "-" + dateTimeFormat.format(new Date());
325             else if(StringUtils.isNotBlank(processDef.getBatchProcessName()))
326                 fileName = processDef.getBatchProcessName()+ "-" + dateTimeFormat.format(new Date());
327             else
328                 fileName = job.getJobId()+ "-" + dateTimeFormat.format(new Date());
329             if (response.getSearchResults().size() >= processDef.getChunkSize()) {
330                 fileCount += (start / processDef.getChunkSize());
331                 //fileName = processDef.getBatchProcessProfileName() + "_" + fileCount;
332                 if(StringUtils.isNotBlank(processDef.getDestinationDirectoryPath()))
333                     fileName = processDef.getDestinationDirectoryPath()+ "-" + dateTimeFormat.format(new Date()) + "-" + OLEConstants.OLEBatchProcess.PART  + fileCount;
334                 else if(StringUtils.isNotBlank(processDef.getBatchProcessName()))
335                     fileName = processDef.getBatchProcessName() + "-" + dateTimeFormat.format(new Date()) + "-" + OLEConstants.OLEBatchProcess.PART  + fileCount;
336                 else
337                     fileName = job.getJobId() + "-" + dateTimeFormat.format(new Date()) + "-" + OLEConstants.OLEBatchProcess.PART  + fileCount;
338             }
339             String homeDirectory = getBatchProcessFilePath(processDef.getBatchProcessType());
340             filePath = new File(homeDirectory + FileSystems.getDefault().getSeparator() + job.getBatchProfileName());
341             /*if (StringUtils.isNotEmpty(processDef.getDestinationDirectoryPath())) {
342                 filePath = new File(homeDirectory + FileSystems.getDefault().getSeparator() + processDef.getDestinationDirectoryPath());
343             } else if (filePath == null || !filePath.isDirectory()) {
344                 filePath = new File(homeDirectory + FileSystems.getDefault().getSeparator() + job.getBatchProfileName());
345             }*/
346             if (filePath.isDirectory()) {
347                 //in case of paused and stopped status of job
348                 //job has already run and directory exists
349                 //LOG.info("filePath :: " + filePath.getPath() + " ::already exists");
350             } else {
351                 if (filePath.mkdirs()) {
352                     // able to create directory for the given file path
353                     LOG.info("user given filePath :: " + filePath.getPath() + " ::created successfully");
354                 } else {
355                     filePath = new File(homeDirectory + FileSystems.getDefault().getSeparator() + job.getBatchProfileName());
356                     if (filePath.mkdirs())
357                         LOG.info("default filePath :: " + filePath.getPath() + " ::created");
358                     else {
359                         LOG.error("Cannot create output directory for the given file path:: " + filePath.getPath());
360                         job.setStatus(JOB_STATUS_STOPPED);
361                         throw new RuntimeException("Cannot create output directory for the given file path:: " + filePath.getPath());
362                     }
363                 }
364             }
365             job.setUploadFileName(filePath.getPath());
366         } catch (Exception ex) {
367             LOG.error("Error while prepareForWrite operation", ex);
368             job.setStatus(JOB_STATUS_STOPPED);
369             throw ex;
370         }
371 
372     }
373 
374 
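    /**
     * Variant of prepareForWrite that records the supplied file name as the batch process profile name and
     * uses it when building the part file name, then creates the output directory in the same way.
     */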
375     public void prepareForWrite(String fileName) throws Exception {
376         dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
377         try {
378             processDef.setBatchProcessProfileName(fileName);
379             if (response.getSearchResults().size() > processDef.getChunkSize()) {
380                 fileCount += (start / processDef.getChunkSize());
381                 fileName = processDef.getBatchProcessProfileName() + "_" + fileCount;
382             }
383             String homeDirectory = getBatchProcessFilePath(processDef.getBatchProcessType());
384             filePath = new File(homeDirectory + FileSystems.getDefault().getSeparator() + job.getBatchProfileName());
385             /*if (StringUtils.isNotEmpty(processDef.getDestinationDirectoryPath())) {
386                 filePath = new File(homeDirectory + FileSystems.getDefault().getSeparator() + job.getBatchProfileName());
387             } else if (filePath == null || !filePath.isDirectory()) {
388                 filePath = new File(homeDirectory + FileSystems.getDefault().getSeparator() + job.getBatchProfileName());
389             }*/
390             if (filePath.isDirectory()) {
391                 //in case of paused and stopped status of job
392                 //job has already run and directory exists
393                 //LOG.info("filePath :: " + filePath.getPath() + " ::already exists");
394             } else {
395                 if (filePath.mkdirs()) {
396                     // able to create directory for the given file path
397                     LOG.info("user given filePath :: " + filePath.getPath() + " ::created successfully");
398                 } else {
399                     filePath = new File(homeDirectory + FileSystems.getDefault().getSeparator() + job.getBatchProfileName());
400                     if (filePath.mkdirs())
401                         LOG.info("default filePath :: " + filePath.getPath() + " ::created");
402                     else {
403                         LOG.error("Cannot create output directory for the given file path:: " + filePath.getPath());
404                         job.setStatus(JOB_STATUS_STOPPED);
405                         throw new RuntimeException("Cannot create output directory for the given file path:: " + filePath.getPath());
406                     }
407                 }
408             }
409             job.setUploadFileName(filePath.getPath());
410         } catch (Exception ex) {
411             LOG.error("Error while prepareForWrite operation", ex);
412             job.setStatus(JOB_STATUS_STOPPED);
413             throw ex;
414         }
415 
416     }
417 
418     /**
419      * Performs the batch write operation.
420      * Writes the data in MARCXML or MRC format based on the output format specified in the process definition.
421      *
422      * @throws Exception
423      */
424     @Override
425     protected void processBatch() throws Exception {
426         int currSuccessRec = 0;
427         int successRec = Integer.valueOf(job.getNoOfSuccessRecords());
428         int recordProcessed = Integer.valueOf(job.getNoOfRecordsProcessed());
429         int errRecords = Integer.valueOf(job.getNoOfFailureRecords());
430         int currErrCnt = Integer.valueOf(errCnt);
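        // Write this chunk in the configured output format, then roll the chunk's success and error counts into the job totals.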
431         if (processDef.getOutputFormat().equalsIgnoreCase(MARCXML)) {
432             try {
433                 if (processedRec > 0)
434                     writeFileToLocation();
435                 currSuccessRec = processedRec;
436             } catch (Exception e) {
437                 LOG.error("Error while writing to file:: marcxml ::", e);
438                 job.setStatus(JOB_STATUS_STOPPED);
439                 job.setStatusDesc("Error while writing to marcxml file::" + fileName + EXT_MARCXML);
440                 currSuccessRec = 0;
441                 currErrCnt += processedRec;
442             }
443         } else if (processDef.getOutputFormat().equalsIgnoreCase(MARC)) {
444             try {
445                 currSuccessRec = generateMarcFromXml();
446                 currErrCnt += processedRec - currSuccessRec;
447             } catch (Exception e) {
448                 LOG.error("Error while writing to file:: mrc ::", e);
449             }
450         }
451         try {
452             writeErrorFile();
453         } catch (Exception ex) {
454             LOG.error("Error while writing to error file", ex);
455             job.setStatus(JOB_STATUS_STOPPED);
456         }
457 
458         job.setNoOfRecordsProcessed(String.valueOf(recordProcessed + currSuccessRec + currErrCnt));
459         //job.setTotalNoOfRecords(String.valueOf(response.getTotalRecordCount()));
460         job.setNoOfFailureRecords(String.valueOf(errRecords + currErrCnt));
461         job.setNoOfSuccessRecords(String.valueOf(successRec + currSuccessRec));
462         LOG.debug(job.getNoOfRecordsProcessed() + " ::records processed");
463         if (currErrCnt > 0)
464             LOG.debug(job.getNoOfFailureRecords() + " ::records failed");
465     }
466 
467     /**
468      * Writes the exported MARCXML content to the output file.
469      *
470      * @throws Exception
471      */
472     public void writeFileToLocation() throws Exception {
473         File file = new File(filePath + FileSystems.getDefault().getSeparator() + fileName + EXT_MARCXML);
474         FileUtils.writeLines(file, "UTF-8", bibDocList, true);
475     }
476 
477     public void writeFileToLocation(String fileName) throws Exception {
478         File file = new File(filePath + FileSystems.getDefault().getSeparator() + fileName + EXT_MARCXML);
479         FileUtils.writeLines(file, "UTF-8", bibDocList, true);
480     }
481 
482     /**
483      * Converts the exported MARCXML content into binary MARC and writes it to the mrc file.
484      *
485      * @return the number of records written successfully
486      */
487     public int generateMarcFromXml() throws Exception {
488         StopWatch timer = new StopWatch();
489         timer.start();
490         int successRec = 0;
491         File fileToWrite = new File(filePath + FileSystems.getDefault().getSeparator() + fileName + EXT_MARC);
492         FileOutputStream fileOutputStream = new FileOutputStream(fileToWrite, true);
493         //String bibContent = StringUtils.join(bibDocList, "");
494         if (!fileToWrite.exists()) {
495             if (fileToWrite.getParentFile().mkdirs() && fileToWrite.createNewFile()) {
496                 //do nothing
497             } else {
498                 LOG.error("Cannot create mrc file in the given file path :: " + fileToWrite.getPath());
499                 job.setStatus(JOB_STATUS_STOPPED);
500                 throw new RuntimeException("Cannot create mrc file in the given file path :: " + fileToWrite.getPath());
501             }
502         }
503         MarcWriter writer = new MarcStreamWriter(fileOutputStream, "UTF-8");
504         int errorCount = 0;
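        // Convert each MARCXML record to binary MARC; records that fail to parse are logged to errBuilder and skipped.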
505         for (String bibContent : bibDocList) {
506             InputStream input = new ByteArrayInputStream(bibContent.getBytes("UTF-8"));
507             Record record = null;
508             OLEMarcReader marcXmlReader = new OLEMarcXmlReader(input);
509             try {
510                 while (marcXmlReader.hasNext()) {
511                     if (marcXmlReader.hasErrors()) {
512                         marcXmlReader.next();
513                         errBuilder.append(marcXmlReader.getError().toString()).append(lineSeparator);
514                         errorCount++;
515                         marcXmlReader.clearErrors();
516                         continue;
517                     }
518                     record = marcXmlReader.next();
519                     writer.write(record);
520                     successRec++;
521                 }
522 
523             } catch (Exception ex) {
524                 String recordId = getRecordId(record);
525                 LOG.error("Error while parsing MARCXML to mrc data:: " + (recordId == null ? "NULL_RECORD" : "record id:: " + recordId), ex);
526                 errBuilder.append(ERR_BIB).append(recordId == null ? "ERROR_RECORD" : recordId).append(TIME_STAMP)
527                         .append(new Date()).append(ERR_CAUSE).append(ex.getMessage()).append(" ::At:: ").append("generateMarcFromXml() For Record ::"+bibContent).append(lineSeparator);
528             }
529         }
530         writer.close();
531         timer.stop();
532         return successRec;
533     }
534 
535     /**
536      * Converts the given date string to the Solr date format (yyyy-MM-dd'T'HH:mm:ss'Z').
537      *
538      * @param dateStr the user-entered date in the filter date format
539      * @param isFrom  true to use the start of the day (00:00:00), false to use the end of the day (23:59:59)
540      * @return the date formatted for Solr
541      * @throws ParseException if the date string cannot be parsed
542      */
543     private String getSolrDate(String dateStr, boolean isFrom) throws ParseException {
544         SimpleDateFormat solrDtFormat = new SimpleDateFormat(SOLR_DT_FORMAT);
545         SimpleDateFormat userFormat = new SimpleDateFormat(FILTER_DT_FORMAT);
546         try {
547             if (isFrom) {
548                 Date date = userFormat.parse(dateStr);
549                 Calendar cal = Calendar.getInstance();
550                 cal.setTime(date);
551                 cal.set(cal.get(Calendar.YEAR), cal.get(Calendar.MONTH), cal.get(Calendar.DATE), 0, 0, 0);
552                 return solrDtFormat.format(cal.getTime());
553             } else {
554                 Date date = userFormat.parse(dateStr);
555                 Calendar cal = Calendar.getInstance();
556                 cal.setTime(date);
557                 cal.set(cal.get(Calendar.YEAR), cal.get(Calendar.MONTH), cal.get(Calendar.DATE), 23, 59, 59);
558                 return solrDtFormat.format(cal.getTime());
559             }
560         } catch (ParseException e) {
561             LOG.error("Error while parsing user entered date::" + dateStr, e);
562             throw e;
563         }
564     }
565 
566     /**
567      * Loads the profile; for incremental export, determines the last export date from the most recent
568      * completed job for the profile (or, on the first run, from the schedule creation time or the job start time).
569      * @param processDef
570      */
571     @Override
572     protected void loadProfile(OLEBatchProcessDefinitionDocument processDef) throws Exception {
573         super.loadProfile(processDef);
574         if (processDef.getBatchProcessType().equalsIgnoreCase(OLEConstants.OLEBatchProcess.BATCH_EXPORT) && processDef.getLoadIdFromFile().equalsIgnoreCase("true")) {
575             String batchProcessFileContent = getBatchProcessFileContent();
576             if (processDef.getOleBatchProcessProfileBo().getOleBatchProcessProfileFilterCriteriaList().size() == 1 && processDef.getOleBatchProcessProfileBo().getOleBatchProcessProfileFilterCriteriaList().get(0).getFilterFieldName().equalsIgnoreCase(LOCAL_ID_DISPLAY)) {
577                 processDef.getOleBatchProcessProfileBo().getOleBatchProcessProfileFilterCriteriaList().get(0).setFilterFieldValue(batchProcessFileContent);
578             }
579         }
580         List<OLEBatchProcessProfileMappingOptionsBo> optionsBoList = processDef.getOleBatchProcessProfileBo().getOleBatchProcessProfileMappingOptionsList();
581         for (OLEBatchProcessProfileMappingOptionsBo bo : optionsBoList) {
582             processDef.getOleBatchProcessProfileBo().getOleBatchProcessProfileDataMappingOptionsBoList().addAll(bo.getOleBatchProcessProfileDataMappingOptionsBoList());
583         }
584         try {
585             if (processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(EXPORT_INC)  || processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(INCREMENTAL_EXPORT_EX_STAFF) ) {
586                 Calendar cal = Calendar.getInstance();
587                 String jobId_FL = "start_time";
588                 Map<String, String> jobBoMap = new HashMap<>();
589                 jobBoMap.put("bat_prfle_nm", processDef.getBatchProcessProfileName());
590                 jobBoMap.put("status", JOB_STATUS_COMPLETED);
591                 // Gets adhoc jobs and scheduled jobs that ran with the profile name and status completed.
592                 List<OLEBatchProcessJobDetailsBo> jobDetailsBos = (List<OLEBatchProcessJobDetailsBo>) KRADServiceLocator.getBusinessObjectService().findMatchingOrderBy(OLEBatchProcessJobDetailsBo.class, jobBoMap, jobId_FL, true);
593                 if (jobDetailsBos == null || CollectionUtils.isEmpty(jobDetailsBos)) {
594                     jobId_FL = "CRTE_TIME";
595                     jobBoMap.clear();
596                     jobBoMap.put("batchProcessId", processDef.getBatchProcessId());
597                     // Gets scheduled job running for the first time to set the creation time as last export date.
598                     List<OLEBatchProcessScheduleBo> scheduleBos = (List<OLEBatchProcessScheduleBo>) KRADServiceLocator.getBusinessObjectService().findMatchingOrderBy(OLEBatchProcessScheduleBo.class, jobBoMap, jobId_FL, true);
599                     if (scheduleBos == null || CollectionUtils.isEmpty(scheduleBos)) {
600                         // Job start time is set as last export date for the adhoc jobs that are running for the first time.
601                         cal.setTime(job.getStartTime());
602                     } else {
603                         cal.setTime(scheduleBos.get(scheduleBos.size() - 1).getCreateTime());
604                     }
605                 } else {
606                     cal.setTime(jobDetailsBos.get(jobDetailsBos.size() - 1).getStartTime());
607                 }
608                 lastExportDate = getUTCTime(cal.getTime());
609 
610                 LOG.info("Incremental export running for batch export profile :: " + processDef.getBatchProcessProfileName() + " :: with date ::" + lastExportDate);
611             }
612         } catch (Exception ex) {
613             LOG.error("Error while retrieving job details for incremental export::", ex);
614             throw ex;
615         }
616     }
617 
618     /**
619      * Builds the Solr query from the filter criteria (or reuses it with an updated start index) and executes it to retrieve the results.
620      *
621      * @throws Exception
622      */
623     public void performSolrQuery() throws Exception {
624         try {
625             if (searchParams == null) {
626                 getSolrQuery();
627                 if (!(processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(EXPORT_FULL)
628                         || processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(EXPORT_EX_STAFF))) {
629 
630                     response = getDocstoreClientLocator().getDocstoreClient().search(searchParams);
631                     if (response.getSearchResults().size() == 0) {
632                         job.setTotalNoOfRecords("0");
633                     } else {
634                         job.setTotalNoOfRecords(String.valueOf(response.getTotalRecordCount()));
635                     }
636                 } else {
637                     response = getDocstoreClientLocator().getDocstoreClient().search(searchParams);
638                     job.setTotalNoOfRecords(String.valueOf(response.getTotalRecordCount()));
639                 }
640                 LOG.info("Total number of records to be exported :: " + job.getTotalNoOfRecords());
641 
642             } else {
643                 searchParams.setStartIndex(start);
644                 response = getDocstoreClientLocator().getDocstoreClient().search(searchParams);
645                 job.setTotalNoOfRecords(String.valueOf(response.getTotalRecordCount()));
646             }
647 
648 
649         } catch (Exception e) {
650             LOG.error("Error while performing solr query :: ", e);
651             throw e;
652         }
653     }
654 
655     /**
656      * converts the given date to UTC time
657      *
658      * @param date
659      * @return
660      * @throws ParseException
661      */
662     private static Date getUTCTime(Date date) throws ParseException {
663         DateFormat format = new SimpleDateFormat(SOLR_DT_FORMAT);
664         format.setTimeZone(TimeZone.getTimeZone("UTC"));
665         String utcStr = format.format(date);
666         DateFormat format2 = new SimpleDateFormat(SOLR_DT_FORMAT);
667         Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
668         cal.setTime(format2.parse(utcStr));
669         return cal.getTime();
670     }
671 
672     /**
673      * gets the default search condition
674      *
675      * @return
676      */
677     private SearchCondition getDefaultCondition() {
678         SearchCondition condition = new SearchCondition();
679         condition.setSearchScope(AND);
680         condition.setOperator(AND);
681         return condition;
682     }
683 
684     /**
685      * Writes the error records to the path
686      *
687      * @throws Exception
688      */
689     public void writeErrorFile() throws Exception {
690         if (errBuilder != null && StringUtils.isNotEmpty(errBuilder.toString())) {
691             File file = new File(filePath + FileSystems.getDefault().getSeparator() + processDef.getBatchProcessProfileName() + EXT_ERR_TXT);
692             FileUtils.writeStringToFile(file, errBuilder.toString(), "UTF-8", true);
693         }
694     }
695 
696     /**
697      * Reads the filter criteria from the profile and sets the search params for the Solr query.
698      *
699      * @param criteriaBos
700      * @return
701      * @throws ParseException
702      */
703     private String getFilterCriteria(List<OLEBatchProcessProfileFilterCriteriaBo> criteriaBos) throws ParseException {
704         //OLEBatchProcessProfileFilterCriteriaBo staffOnlyCriteriaBo = null;
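        // For each profile filter: date fields become Solr range queries, "load ids from file" values and bib
        // statuses are turned into dedicated search conditions, and MARC-mapped filter names are translated to
        // their dynamic Solr fields via InstanceMappingHelper before being added to the search params.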
705         for (OLEBatchProcessProfileFilterCriteriaBo bo : criteriaBos) {
706             if (processDef.getOleBatchProcessProfileBo().getExportScope().equalsIgnoreCase(EXPORT_INC)
707                     && bo.getFilterFieldName().equalsIgnoreCase((OLEConstants.OLEBatchProcess.DATE_UPDATED))) {
708                 continue; // do not add dateUpdated to the params even if it is present in the filter; it is taken from the last run of the same profile
709             }
710             if (bo.getFilterFieldName().equalsIgnoreCase(OLEConstants.OLEBatchProcess.STAFF_ONLY_FLAG)) {
711                 staffOnlyCriteriaBo = bo;
712                 //continue;
713             }
714             SearchCondition condition = getDefaultCondition();
715             SearchField searchField = new SearchField();
716             searchField.setFieldName(bo.getFilterFieldName());
717             //condition.setDocField(bo.getFilterFieldName());
718             if (StringUtils.isNotEmpty(bo.getFilterFieldValue())) { // one value
719                 Map<String, String> filterMap = new HashMap<>();
720                 filterMap.put("ole_bat_field_nm", bo.getFilterFieldName());
721                 Collection<OLEBatchProcessFilterCriteriaBo> filterBo = KRADServiceLocator.getBusinessObjectService().findMatching(OLEBatchProcessFilterCriteriaBo.class, filterMap);
722                 if (filterBo.iterator().hasNext()) {
723                     OLEBatchProcessFilterCriteriaBo oleBatchProcessFilterCriteriaBo = filterBo.iterator().next();
724                     if (oleBatchProcessFilterCriteriaBo.getFieldType().equalsIgnoreCase(OLEConstants.OLEBatchProcess.DATE)) {
725                         searchField.setFieldValue("[" + getSolrDate(bo.getFilterFieldValue(), true) + " TO " + getSolrDate(bo.getFilterFieldValue(), false) + "]");
726                         //condition.setSearchText("[" + getSolrDate(bo.getFilterFieldValue(), true) + " TO " + getSolrDate(bo.getFilterFieldValue(), false) + "]");
727                         condition.setSearchScope(NONE);
728                         //condition.setOperator(RANGE);
729                         if (filterBo.iterator().next().getFieldName().equalsIgnoreCase(OLEConstants.OLEBatchProcess.STATUS_UPDATED_ON)) {
730                             searchField.setDocType(DocType.BIB.getDescription());
731                         }
732                     } else if(oleBatchProcessFilterCriteriaBo.getFieldDisplayName().equalsIgnoreCase(OLEConstants.OLEBatchProcess.OLE_BATCH_FLTR_CRITERIA_LOAD_FROM_FILE)){
733                          buildSearchConditions(bo.getFilterFieldValue());
734                     } else if (oleBatchProcessFilterCriteriaBo.getFieldDisplayName().equalsIgnoreCase(OLEConstants.OLEBatchProcess.OLE_BATCH_FLTR_CRITERIA_BIB_STATUS)) {
735                         buildSearchConditionsForStatus(bo.getFilterFieldValue());
736                     } else {
737                         searchField.setDocType(DocType.BIB.getDescription());
738                         searchField.setFieldValue(bo.getFilterFieldValue());
739                         /*if (bo.getFilterFieldName().equalsIgnoreCase(OLEConstants.OLEBatchProcess.STATUS_SEARCH)) {
740                             //To set bib status values as 'Catalogued' or 'Cataloguing' or 'None' in case sensitive.
741                             searchField.setFieldValue(StringUtils.capitalize(StringUtils.lowerCase(bo.getFilterFieldValue())));
742                         } else {
743                             searchField.setFieldValue(bo.getFilterFieldValue());
744                         }*/
745                         //condition.setSearchText(bo.getFilterFieldValue());
746                     }
747                 } else {
748                     try {
749                         InstanceMappingHelper instanceMappingHelper = new InstanceMappingHelper();
750                         String filterFieldNameTag = instanceMappingHelper.getTagForExportFilter(bo.getFilterFieldName());
751                         String filterFieldNameCode = instanceMappingHelper.getCodeForExportFilter(bo.getFilterFieldName());
752                         if (StringUtils.isEmpty(filterFieldNameTag) || StringUtils.isEmpty(filterFieldNameCode)) {
753                             searchField.setFieldName(bo.getFilterFieldName());
754                         } else {
755                             // Convert marc data field tag into its corresponding solr field
756                             filterFieldNameTag = OLEConstants.OLEBatchProcess.DYNAMIC_FIELD_PREFIX + filterFieldNameTag;
757                             String docField = filterFieldNameTag + filterFieldNameCode;
758                             searchField.setFieldName(docField);
759                         }
760                         condition.setSearchScope(NONE);
761                     } catch (StringIndexOutOfBoundsException e) {
762                         searchField.setFieldName(bo.getFilterFieldName());
763                     }
764                     searchField.setDocType(DocType.BIB.getDescription());
765                     searchField.setFieldValue(bo.getFilterFieldValue());
766                 }
767             } else if (StringUtils.isNotEmpty(bo.getFilterRangeFrom()) && StringUtils.isNotEmpty(bo.getFilterRangeTo())) {
768                 // range values
769                 //condition.setOperator(RANGE);
770                 condition.setSearchScope(NONE);
771                 Map<String, String> filterMap = new HashMap<>();
772                 filterMap.put("ole_bat_field_nm", bo.getFilterFieldName());
773                 Collection<OLEBatchProcessFilterCriteriaBo> filterBo = KRADServiceLocator.getBusinessObjectService().findMatching(OLEBatchProcessFilterCriteriaBo.class, filterMap);
774                 if (!filterBo.iterator().hasNext()) return "";
775                 if (filterBo.iterator().next().getFieldType().equalsIgnoreCase(OLEConstants.OLEBatchProcess.DATE)) {
776                     searchField.setFieldValue("[" + getSolrDate(bo.getFilterRangeFrom(), true) + " TO " + getSolrDate(bo.getFilterRangeTo(), false) + "]");
777                     if (filterBo.iterator().next().getFieldName().equalsIgnoreCase(OLEConstants.OLEBatchProcess.STATUS_UPDATED_ON)) {
778                         searchField.setDocType(DocType.BIB.getDescription());
779                     }
780                 } else {
781                     searchField.setFieldValue("[" + bo.getFilterRangeFrom() + " TO " + bo.getFilterRangeTo() + "]");
782                 }
783             } else if (StringUtils.isNotEmpty(bo.getFilterRangeFrom()) && StringUtils.isEmpty(bo.getFilterRangeTo())) {   // range values
784                 //condition.setOperator(RANGE);
785                 condition.setSearchScope(NONE);
786                 Map<String, String> filterMap = new HashMap<>();
787                 filterMap.put("ole_bat_field_nm", bo.getFilterFieldName());
788                 Collection<OLEBatchProcessFilterCriteriaBo> filterBo = KRADServiceLocator.getBusinessObjectService().findMatching(OLEBatchProcessFilterCriteriaBo.class, filterMap);
789                 if (!filterBo.iterator().hasNext()) return "";
790                 if (filterBo.iterator().next().getFieldType().equalsIgnoreCase(OLEConstants.OLEBatchProcess.DATE)) {
791                     searchField.setFieldValue("[" + getSolrDate(bo.getFilterRangeFrom(), true) + " TO NOW]");
792                 } else {
793                     searchField.setFieldValue("[" + bo.getFilterRangeFrom() + " TO *]");
794                 }
795             }
796             // if bib status or local id is present in the filter criteria, select only bib records by setting the export scope to full
797             if (bo.getFilterFieldName().equalsIgnoreCase(OLEConstants.OLEBatchProcess.LOCAL_ID_SEARCH) || bo.getFilterFieldName().equalsIgnoreCase(OLEConstants.OLEBatchProcess.STATUS_SEARCH)
798                     || (StringUtils.isNotEmpty(processDef.getBatchProcessProfileBo().getDataToExport()) && processDef.getBatchProcessProfileBo().getDataToExport().equalsIgnoreCase(EXPORT_BIB_ONLY) && !bo.getFilterFieldName().equalsIgnoreCase(LOCAL_ID_DISPLAY))) {
799                 processDef.getOleBatchProcessProfileBo().setExportScope(EXPORT_FULL);
800                 //searchParams.setDocFormat(DocFormat.MARC.getDescription());
801                 searchField.setDocType(DocType.BIB.getDescription());
802                 /*if (bo.getFilterFieldName().equalsIgnoreCase(OLEConstants.OLEBatchProcess.STATUS_SEARCH))
803                     condition.setSearchScope(PHRASE);*/
804             }
805             if (!bo.getFilterFieldName().equalsIgnoreCase(LOCAL_ID_DISPLAY) && !bo.getFilterFieldName().equalsIgnoreCase(STATUS_SEARCH)) {
806                 condition.setSearchField(searchField);
807                 searchParams.getSearchConditions().add(condition);
808             }
809         }
810         return "";
811     }
812 
813     /**
814      * returns the record id - local identifier of the record
815      *
816      * @param record
817      * @return
818      */
819     private String getRecordId(Record record) {
820         if (record == null || record.getControlFields() == null || record.getControlFields().isEmpty() || record.getControlFields().size() < 2)
821             return null;
822         if (record.getControlFields().get(1) == null) return null;
823         VariableField field = (VariableField) record.getControlFields().get(1);
824         if (field instanceof ControlField) {
825             return ((ControlField) field).getData();
826         } else {
827             return null;
828         }
829 
830     }
831 
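    /**
     * Runs the incremental/filter export: exports the records returned by the Solr query and, for the
     * incremental scopes, writes the ids of deleted bibs (plus bibs newly flagged staff-only when staff
     * records are excluded) out through createFile.
     */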
832     private void incrementalFilterExport(OLEBatchProcessProfileBo profileBo) throws Exception {
833         Object[] resultMap = null;
834         if (response.getSearchResults().size() > 0) {
835             service = new ExportDataServiceImpl();
836             resultMap = batchExport(profileBo);
837         }
838         // Write deleted bib ids to a file
839         if (profileBo.getExportScope().equals(EXPORT_INC)
840                 || profileBo.getExportScope().equalsIgnoreCase(INCREMENTAL_EXPORT_EX_STAFF)) {
841             if (lastExportDate != null) {
842                 StringBuilder deleteId = new StringBuilder();
843                 if ( profileBo.getExportScope().equalsIgnoreCase(INCREMENTAL_EXPORT_EX_STAFF)) {
844                     List<String> incrementalBibIds = new ArrayList<>();
845                     List<Bib> incrementalBibs = new ArrayList<>();
846                      response = getIncrementalSolrQuery();
847                     for (SearchResult searchResult : response.getSearchResults()) {
848                         for (SearchResultField searchResultField : searchResult.getSearchResultFields()) {
849                             if (searchResultField.getFieldName().equalsIgnoreCase("bibIdentifier")) {
850                                 if (!incrementalBibIds.contains(searchResultField.getFieldValue())) {
851                                     incrementalBibIds.add(searchResultField.getFieldValue());
852                                 }
853                             }
854                         }
855                     }
856                     if (!CollectionUtils.isEmpty(incrementalBibIds)) {
857                         incrementalBibs = getDocstoreClientLocator().getDocstoreClient().retrieveBibs(incrementalBibIds);
858                         for (Bib bib : incrementalBibs) {
859                             if (bib.isStaffOnly()) {
860                                 deleteId.append(DocumentUniqueIDPrefix.getDocumentId(bib.getId())).append(COMMA);
861                             }
862                         }
863                         incrementalBibIds.clear();
864                         incrementalBibs.clear();
865                     }
866                 }
867                 response = getDeleteSolrQuery();
868                 if (response.getSearchResults().size() > 0 || deleteId.length() > 0) {
869                     if (response.getSearchResults().size() > 0) {
870                         Iterator<SearchResult> iterator = response.getSearchResults().iterator();
871                         while (iterator.hasNext()) {
872                             SearchResult searchresult = iterator.next();
873                             if (null != searchresult && searchresult.getSearchResultFields() != null) {
874                                 for (SearchResultField searchResultField : searchresult.getSearchResultFields()) {
875                                     if (searchResultField.getFieldName().equalsIgnoreCase("LocalId_display")) {
876                                         String id = searchResultField.getFieldValue();
877                                         if (id != null) {
878                                             deleteId.append(id);
879                                             deleteId.append(",");
880                                         }
881                                     }
882                                 }
883                             }
884                         }
885                     }
886                     String deleted = "";
887                     if (deleteId.length() > 0) {
888                         deleted = deleteId.substring(0, deleteId.length() - 1); // drop the trailing comma
889                     }
890                     if (deleted.length() > 0) {
891                         String[] ids = deleted.split(",");
892                         createFile(ids);
893                     }
894                     if (job.getTotalNoOfRecords() != null) {
895                         job.setTotalNoOfRecords(String.valueOf(Integer.valueOf(job.getTotalNoOfRecords()) + response.getTotalRecordCount()));
896                     } else {
897                         job.setTotalNoOfRecords(String.valueOf(response.getTotalRecordCount()));
898                     }
899                     if (job.getNoOfRecordsProcessed() != null) {
900                         job.setNoOfRecordsProcessed(String.valueOf(Integer.valueOf(job.getNoOfRecordsProcessed()) + response.getSearchResults().size()));
901                     } else {
902                         job.setNoOfRecordsProcessed(String.valueOf(response.getSearchResults().size()));
903                     }
904                     if (job.getNoOfSuccessRecords() != null) {
905                         job.setNoOfSuccessRecords(String.valueOf(Integer.valueOf(job.getNoOfSuccessRecords()) + response.getSearchResults().size()));
906                     } else {
907                         job.setNoOfSuccessRecords(String.valueOf(response.getSearchResults().size()));
908                     }
909                 }
910             }
911         }
912 
913         String homeDirectory = getBatchProcessFilePath(processDef.getBatchProcessType());
914         if (StringUtils.isNotEmpty(processDef.getDestinationDirectoryPath())) {
915             filePath = new File(processDef.getDestinationDirectoryPath() + FileSystems.getDefault().getSeparator() + job.getJobId());
916         } else if (filePath == null || !filePath.isDirectory()) {
917             filePath = new File(homeDirectory + FileSystems.getDefault().getSeparator() + job.getJobId());
918         }
919         job.setUploadFileName(filePath.getPath());
920         if (resultMap == null || resultMap[0].equals("0")) {
921             job.setStatus(OLEConstants.OLEBatchProcess.JOB_STATUS_COMPLETED);
922         } else {
923             //bibDocList.add(XML_DEC);
924             bibDocList.addAll((List<String>) resultMap[1]);
925             processedRec = Integer.valueOf(resultMap[0].toString());
926         }
927         if (resultMap != null) {
928             if (resultMap[2] != null)
929                 errBuilder.append(resultMap[2].toString());
930             if (resultMap[3] != null)
931                 errCnt = resultMap[3].toString();
932         }
933     }
934 
935 
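    /**
     * Full export: streams bib trees from the database in chunks of maxRecordsInFile, writing each chunk to
     * its own output file (with a part number when more than one file is needed) until all records are
     * processed or the job is cancelled, paused or stopped.
     */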
936     public void batchProcessExportFetch() throws Exception {
937         dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
938 
939         int totalRecords = 0;
940         int remainingRecords = 0;
941         int recordsToBeExportedToFile = 0;
942         int recProcessed = 0;
943         Boolean isBibOnly=true;
944 
945         if (!processDef.getBatchProcessProfileBo().getOleBatchProcessProfileMappingOptionsList().isEmpty()
946                 && StringUtils.isNotEmpty(processDef.getBatchProcessProfileBo().getDataToExport()) && (processDef.getBatchProcessProfileBo().getDataToExport().equalsIgnoreCase(OLEBatchProcess.EXPORT_BIB_AND_INSTANCE) || processDef.getBatchProcessProfileBo().getDataToExport().equalsIgnoreCase(OLEBatchProcess.EXPORT_BIB_INSTANCE_AND_EINSTANCE))) {
947             isBibOnly=false;
948         }
949         StopWatch timer = new StopWatch();
950         timer.start();
951 
952         Date date =new Date();
953         LOG.info("Batch Export Started : " +date.toString());
954 
955         BatchBibTreeDBUtil bibTreeDBUtil = null;
956         if (processDef.getBatchProcessProfileBo().getExportScope().equalsIgnoreCase(EXPORT_EX_STAFF)) {
957             bibTreeDBUtil = new BatchBibTreeDBUtil(false);
958         } else {
959             bibTreeDBUtil = new BatchBibTreeDBUtil();
960         }
962 
963 
964         job.setTotalNoOfRecords(bibTreeDBUtil.getTotalNoOfRecords());
965         updateJobProgress();
966 
967 
968         bibTreeDBUtil.init();
969 
970         recProcessed = Integer.parseInt(job.getNoOfRecordsProcessed());
971         totalRecords = Integer.parseInt(job.getTotalNoOfRecords());
972 
973 
974         int fileNumber = 1;
975         while (recProcessed < totalRecords) {
976             remainingRecords = totalRecords - recProcessed;
977             recordsToBeExportedToFile = Math.min(processDef.getMaxRecordsInFile(), remainingRecords);
978             if(totalRecords >= processDef.getMaxRecordsInFile()){
979                 if (StringUtils.isNotBlank(processDef.getDestinationDirectoryPath())) {
980                     fileName = processDef.getDestinationDirectoryPath() + "-" + dateTimeFormat.format(new Date())+ "-" + OLEConstants.OLEBatchProcess.PART + fileNumber;
981                 }else if (StringUtils.isNotBlank(processDef.getBatchProcessName())) {
982                     fileName = processDef.getBatchProcessName() + "-" + dateTimeFormat.format(new Date()) + "-" + OLEConstants.OLEBatchProcess.PART + fileNumber;
983                 } else {
984                     fileName = job.getJobId()+ "-" +  dateTimeFormat.format(new Date()) + "-" + OLEConstants.OLEBatchProcess.PART + fileNumber;
985                 }
986             }else{
987                 if (StringUtils.isNotBlank(processDef.getDestinationDirectoryPath())) {
988                     fileName = processDef.getDestinationDirectoryPath() + "-" + dateTimeFormat.format(new Date());
989                 }else if (StringUtils.isNotBlank(processDef.getBatchProcessName())) {
990                     fileName = processDef.getBatchProcessName() + "-" + dateTimeFormat.format(new Date());
991                 } else {
992                     fileName = job.getJobId()+ "-" +  dateTimeFormat.format(new Date());
993                 }
994             }
995             fileNumber++;
996             if (!job.getStatus().equalsIgnoreCase(OLEConstants.OLEBatchProcess.JOB_STATUS_COMPLETED)
997                     && !job.getStatus().equalsIgnoreCase(OLEConstants.OLEBatchProcess.JOB_STATUS_CANCELLED)
998                     && !job.getStatus().equalsIgnoreCase(OLEConstants.OLEBatchProcess.JOB_STATUS_PAUSED)
999                     && !job.getStatus().equalsIgnoreCase(OLEConstants.OLEBatchProcess.JOB_STATUS_STOPPED)) {
1000 
1001                 BatchExportFetch batchExportFetch = new BatchExportFetch(bibTreeDBUtil, recordsToBeExportedToFile, fileName, this, processDef, isBibOnly);
1002                 batchExportFetch.call();
1003                 recProcessed += recordsToBeExportedToFile;
1004 
1005             } else {
1006 
1007                 break;
1008             }
1009 
1010         }
1011 
1012         timer.stop();
1013         date =new Date();
1014         LOG.info("Batch Export Ended : " + date.toString() + " Time Taken : " + timer.toString());
1015         bibTreeDBUtil.closeConnections();
1016         try {
1017             writeErrorFile();
1018         } catch (Exception ex) {
1019             job.setStatus(JOB_STATUS_STOPPED);
1020         }
1021     }
1022 
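    /**
     * Builds one OR-ed search condition per local id taken from the "load ids from file" filter value
     * (one id per line) and adds them to the search params.
     */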
1023     private void buildSearchConditions(String ids){
1024         String[] filterFieldValues = ids.split("\n");
1025         for(String id : filterFieldValues){
1026             SearchCondition searchCondition = new SearchCondition();
1027             searchCondition.setOperator(OR);
1028             searchCondition.setSearchScope(NONE);
1029             SearchField searchField = new SearchField();
1030             searchField.setDocType(DocType.BIB.getDescription());
1031             searchField.setFieldName(LOCAL_ID_SEARCH);
1032             searchField.setFieldValue(id);
1033             searchCondition.setSearchField(searchField);
1034             searchParams.getSearchConditions().add(searchCondition);
1035         }
1036     }
1037 
1038     /**
1039      * Builds the search condition from the bib statuses and adds it to the search params.
1040      * @param fieldValue
1041      */
1042     private void buildSearchConditionsForStatus(String fieldValue) {
1043         String[] filterFieldValues = fieldValue.split(",");
1044         StringBuilder bibStatusBuilder = new StringBuilder();
1045         // building the search condition field value
1046         String bibStatus = "";
1047         for (int i = 0; i < filterFieldValues.length; i++) {
1048             bibStatus = filterFieldValues[i];
1049             if (StringUtils.isNotBlank(bibStatus)) {
1050                 bibStatusBuilder.append("\"" + bibStatus + "\"");
1051                 if (i != filterFieldValues.length - 1) {
1052                     bibStatusBuilder.append(OR);
1053                 }
1054             }
1055         }
1056         SearchCondition searchCondition = new SearchCondition();
1057         searchCondition.setOperator(AND);
1058         searchCondition.setSearchScope(NONE);
1059         SearchField searchField = new SearchField();
1060         searchField.setDocType(DocType.BIB.getDescription());
1061         searchField.setFieldName(STATUS_SEARCH);
1062         searchField.setFieldValue(bibStatusBuilder.toString());
1063         searchCondition.setSearchField(searchField);
1064         searchParams.getSearchConditions().add(searchCondition);
1065     }
1066 
1067 
1068 
1069 
1070 
1071     public StringBuilder getErrBuilder() {
1072         return errBuilder;
1073     }
1074 
1075     public String getErrCnt() {
1076         return errCnt;
1077     }
1078 
1079     public void setErrCnt(String errCnt) {
1080         this.errCnt = errCnt;
1081     }
1082 
1083     public List<String> getBibDocList() {
1084         if (bibDocList == null) {
1085             bibDocList = new ArrayList<>();
1086         }
1087         return this.bibDocList;
1088     }
1089 
1090     public int getProcessedRec() {
1091         return processedRec;
1092     }
1093 
1094     public String getFileName() {
1095         return fileName;
1096     }
1097 
1098     public void setFileName(String fileName) {
1099         this.fileName = fileName;
1100     }
1101 
1102     public int getTotalRecordsExported() {
1103         return totalRecordsExported;
1104     }
1105 
1106     public void setTotalRecordsExported(int totalRecordsExported) {
1107         this.totalRecordsExported = totalRecordsExported;
1108     }
1109 
1110     public SearchResponse getResponse() {
1111         return response;
1112     }
1113 
1114     public void setResponse(SearchResponse response) {
1115         this.response = response;
1116     }
1117 
1118     public File getFilePath() {
1119         return filePath;
1120     }
1121 }