package org.kuali.ole.docstore.indexer.solr;

import org.apache.commons.lang.time.StopWatch;
import org.apache.solr.common.SolrInputDocument;
import org.kuali.ole.docstore.discovery.solr.work.bib.dublin.WorkBibDublinDocBuilder;
import org.kuali.ole.docstore.discovery.solr.work.bib.dublin.unqualified.WorkBibDublinUnQualifiedDocBuilder;
import org.kuali.ole.docstore.model.enums.DocFormat;
import org.kuali.ole.docstore.model.xmlpojo.ingest.RequestDocument;
import org.kuali.ole.docstore.utility.BatchIngestStatistics;
import org.kuali.ole.docstore.utility.BulkIngestStatistics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;

/**
 * Indexes work/bib documents in the Dublin Core and unqualified Dublin Core formats by
 * converting {@link RequestDocument}s into Solr input documents and submitting them to Solr.
 *
 * @author mjagan
 * @since 7/2/13
 */
public class WorkBibDocumentIndexer extends AbstractDocumentIndexer {

    private Logger logger = LoggerFactory.getLogger(this.getClass());

    /** Lazily created singleton instance; initialization is not synchronized. */
    private static WorkBibDocumentIndexer ourInstance = null;

    public static WorkBibDocumentIndexer getInstance() {
        if (null == ourInstance) {
            ourInstance = new WorkBibDocumentIndexer();
        }
        return ourInstance;
    }

    /**
     * Converts the given request documents into {@link SolrInputDocument}s using the
     * format-specific Dublin Core builders, assigns UUIDs, and indexes the results into Solr.
     *
     * @param requestDocuments documents to index; may be null or empty
     * @param commit           whether to commit the Solr index after adding the documents
     * @return a failure message if conversion or indexing fails, otherwise the result of
     *         {@code indexSolrDocuments}; null if no documents were supplied
     */
    @Override
    public String indexDocuments(List<RequestDocument> requestDocuments, boolean commit) {
        BatchIngestStatistics batchStatistics = BulkIngestStatistics.getInstance().getCurrentBatch();

        String result = null;
        StopWatch timer = new StopWatch();
        timer.start();
        List<SolrInputDocument> solrInputDocuments = new ArrayList<SolrInputDocument>();
        if (requestDocuments != null && !requestDocuments.isEmpty()) {
            StopWatch buildSolrInputDocTime = new StopWatch();
            // No separate XML-to-POJO step occurs in this indexer, so this timer is never
            // resumed and the statistic recorded below is effectively zero.
            StopWatch xmlToPojoTimer = new StopWatch();
            buildSolrInputDocTime.start();
            buildSolrInputDocTime.suspend();
            xmlToPojoTimer.start();
            xmlToPojoTimer.suspend();
            try {
                for (RequestDocument requestDocument : requestDocuments) {
                    // Pick the builder that matches the document's Dublin Core format;
                    // unrecognized formats are silently skipped.
                    buildSolrInputDocTime.resume();
                    if (DocFormat.DUBLIN_CORE.isEqualTo(requestDocument.getFormat())) {
                        new WorkBibDublinDocBuilder().buildSolrInputDocument(requestDocument, solrInputDocuments);
                    } else if (DocFormat.DUBLIN_UNQUALIFIED.isEqualTo(requestDocument.getFormat())) {
                        new WorkBibDublinUnQualifiedDocBuilder()
                                .buildSolrInputDocument(requestDocument, solrInputDocuments);
                    }
                    buildSolrInputDocTime.suspend();
                    assignUUIDs(solrInputDocuments, null);
                }
            } catch (Exception e1) {
                result = buildFailureMsg(null, "Indexing failed. " + e1.getMessage());
                logger.error(result, e1);
            }
            timer.stop();
            if (solrInputDocuments.isEmpty()) {
                result = buildFailureMsg(null, "No valid documents found in input.");
                return result;
            }
            int numDocs = solrInputDocuments.size();
            batchStatistics.setTimeToConvertXmlToPojo(xmlToPojoTimer.getTime());
            batchStatistics.setTimeToConvertToSolrInputDocs(buildSolrInputDocTime.getTime());
            logger.info("Conversion to Solr docs- Num:" + numDocs + ": Time taken:" + timer.toString());
            result = indexSolrDocuments(solrInputDocuments, commit);
        }
        return result;
    }
}
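
/*
 * Minimal usage sketch (an illustration, not part of this class): a caller is assumed to have
 * already built a List<RequestDocument> from ingested Dublin Core content. The returned string
 * is null when no documents were supplied, or otherwise reports the indexing outcome.
 *
 *   List<RequestDocument> requestDocuments = ...; // hypothetical, built by the caller
 *   String outcome = WorkBibDocumentIndexer.getInstance().indexDocuments(requestDocuments, true);
 */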