
Example 46 with LinkedList

Use of java.util.LinkedList in project hibernate-orm by hibernate.

From the class CorrectnessTestCase, method checkForEmptyPendingPuts.

protected void checkForEmptyPendingPuts() throws Exception {
    Field pp = PutFromLoadValidator.class.getDeclaredField("pendingPuts");
    pp.setAccessible(true);
    Method getInvalidators = null;
    List<DelayedInvalidators> delayed = new LinkedList<>();
    for (int i = 0; i < sessionFactories.length; i++) {
        SessionFactoryImplementor sfi = (SessionFactoryImplementor) sessionFactories[i];
        for (Object regionName : sfi.getCache().getSecondLevelCacheRegionNames()) {
            PutFromLoadValidator validator = getPutFromLoadValidator(sfi, (String) regionName);
            if (validator == null) {
                log.warn("No validator for " + regionName);
                continue;
            }
            ConcurrentMap<Object, Object> map = (ConcurrentMap) pp.get(validator);
            for (Iterator<Map.Entry<Object, Object>> iterator = map.entrySet().iterator(); iterator.hasNext(); ) {
                Map.Entry entry = iterator.next();
                if (getInvalidators == null) {
                    getInvalidators = entry.getValue().getClass().getMethod("getInvalidators");
                    getInvalidators.setAccessible(true);
                }
                java.util.Collection invalidators = (java.util.Collection) getInvalidators.invoke(entry.getValue());
                if (invalidators != null && !invalidators.isEmpty()) {
                    delayed.add(new DelayedInvalidators(map, entry.getKey()));
                }
            }
        }
    }
    // poll until all invalidations have arrived
    long deadline = System.currentTimeMillis() + 30000;
    while (System.currentTimeMillis() < deadline) {
        iterateInvalidators(delayed, getInvalidators, (k, i) -> {
        });
        if (delayed.isEmpty()) {
            break;
        }
        Thread.sleep(1000);
    }
    if (!delayed.isEmpty()) {
        iterateInvalidators(delayed, getInvalidators, (k, i) -> log.warnf("Left invalidators on key %s: %s", k, i));
        throw new IllegalStateException("Invalidators were not cleared: " + delayed.size());
    }
}
Also used : PutFromLoadValidator(org.hibernate.cache.infinispan.access.PutFromLoadValidator) SessionFactoryImplementor(org.hibernate.engine.spi.SessionFactoryImplementor) ConcurrentMap(java.util.concurrent.ConcurrentMap) Method(java.lang.reflect.Method) LinkedList(java.util.LinkedList) Field(java.lang.reflect.Field) Collection(org.hibernate.mapping.Collection) Map(java.util.Map) TreeMap(java.util.TreeMap) NavigableMap(java.util.NavigableMap) HashMap(java.util.HashMap) ConcurrentSkipListMap(java.util.concurrent.ConcurrentSkipListMap)
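
The test relies on a simple deadline-polling idiom: compute an absolute deadline, re-check the condition, and sleep briefly between attempts. A minimal, generic sketch of that idiom follows; it is not part of the Hibernate test, and the helper name waitForCondition and the use of BooleanSupplier are assumptions made here for illustration.

import java.util.function.BooleanSupplier;

final class PollUtil {

    // Polls the condition once per second until it returns true or the timeout elapses.
    // Returns the final result of the condition.
    static boolean waitForCondition(BooleanSupplier condition, long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (System.currentTimeMillis() < deadline) {
            if (condition.getAsBoolean()) {
                return true;
            }
            Thread.sleep(1000);
        }
        return condition.getAsBoolean();
    }
}

With such a helper, a caller passes the re-check as a BooleanSupplier and the timeout in milliseconds, keeping the sleep-and-retry mechanics in one place.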

Example 47 with LinkedList

Use of java.util.LinkedList in project pinot by linkedin.

From the class TarGzCompressionUtils, method unTar.

/**
 * Untars a gzipped tar archive into the given output directory. Each archive entry is
 * written under the output directory, and the created files and directories are returned.
 *
 * @param inputFile     the input .tar.gz file
 * @param outputDir     the output directory
 * @return the {@link List} of {@link File}s with the untarred content
 * @throws FileNotFoundException if the input file does not exist
 * @throws IOException if an entry cannot be read or written
 * @throws ArchiveException if the tar stream cannot be created
 */
public static List<File> unTar(final File inputFile, final File outputDir) throws FileNotFoundException, IOException, ArchiveException {
    LOGGER.debug(String.format("Untaring %s to dir %s.", inputFile.getAbsolutePath(), outputDir.getAbsolutePath()));
    TarArchiveInputStream debInputStream = null;
    InputStream is = null;
    final List<File> untaredFiles = new LinkedList<File>();
    try {
        is = new GzipCompressorInputStream(new BufferedInputStream(new FileInputStream(inputFile)));
        debInputStream = (TarArchiveInputStream) new ArchiveStreamFactory().createArchiveInputStream("tar", is);
        TarArchiveEntry entry = null;
        while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
            final File outputFile = new File(outputDir, entry.getName());
            if (entry.isDirectory()) {
                LOGGER.debug(String.format("Attempting to write output directory %s.", outputFile.getAbsolutePath()));
                if (!outputFile.exists()) {
                    LOGGER.debug(String.format("Attempting to create output directory %s.", outputFile.getAbsolutePath()));
                    if (!outputFile.mkdirs()) {
                        throw new IllegalStateException(String.format("Couldn't create directory %s.", outputFile.getAbsolutePath()));
                    }
                } else {
                    LOGGER.error("The directory already there. Deleting - " + outputFile.getAbsolutePath());
                    FileUtils.deleteDirectory(outputFile);
                }
            } else {
                LOGGER.debug(String.format("Creating output file %s.", outputFile.getAbsolutePath()));
                File directory = outputFile.getParentFile();
                if (!directory.exists()) {
                    directory.mkdirs();
                }
                OutputStream outputFileStream = null;
                try {
                    outputFileStream = new FileOutputStream(outputFile);
                    IOUtils.copy(debInputStream, outputFileStream);
                } finally {
                    IOUtils.closeQuietly(outputFileStream);
                }
            }
            untaredFiles.add(outputFile);
        }
    } finally {
        IOUtils.closeQuietly(debInputStream);
        IOUtils.closeQuietly(is);
    }
    return untaredFiles;
}
Also used : GzipCompressorInputStream(org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream) BufferedInputStream(java.io.BufferedInputStream) TarArchiveInputStream(org.apache.commons.compress.archivers.tar.TarArchiveInputStream) ByteArrayInputStream(java.io.ByteArrayInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) BufferedOutputStream(java.io.BufferedOutputStream) TarArchiveOutputStream(org.apache.commons.compress.archivers.tar.TarArchiveOutputStream) GzipCompressorOutputStream(org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) LinkedList(java.util.LinkedList) TarArchiveEntry(org.apache.commons.compress.archivers.tar.TarArchiveEntry) ArchiveStreamFactory(org.apache.commons.compress.archivers.ArchiveStreamFactory) File(java.io.File)
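
A possible call site for the method above, assuming TarGzCompressionUtils (the class shown here) is on the classpath; the archive and destination paths are placeholders invented for the example.

import java.io.File;
import java.util.List;

public class UnTarExample {

    public static void main(String[] args) throws Exception {
        // Placeholder paths; point these at a real .tar.gz archive and a target directory.
        // An import for TarGzCompressionUtils is also needed; its package is not shown above.
        File archive = new File("/tmp/segment.tar.gz");
        File destination = new File("/tmp/untarred");
        List<File> extracted = TarGzCompressionUtils.unTar(archive, destination);
        for (File f : extracted) {
            System.out.println("Extracted: " + f.getAbsolutePath());
        }
    }
}

The returned LinkedList preserves archive entry order, so callers can process entries in the order they were written.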

Example 48 with LinkedList

Use of java.util.LinkedList in project pinot by linkedin.

From the class StarTreeIndexOperator, method findMatchingLeafNodes.

private Queue<SearchEntry> findMatchingLeafNodes() {
    Queue<SearchEntry> matchedEntries = new LinkedList<>();
    Queue<SearchEntry> searchQueue = new LinkedList<>();
    HashBiMap<String, Integer> dimensionIndexToNameMapping = segment.getStarTree().getDimensionNameToIndexMap();
    SearchEntry startEntry = new SearchEntry();
    startEntry.starTreeIndexnode = segment.getStarTree().getRoot();
    startEntry.remainingPredicateColumns = new HashSet<>(predicatesMap.keySet());
    startEntry.remainingGroupByColumns = new HashSet<>(groupByColumns);
    searchQueue.add(startEntry);
    while (!searchQueue.isEmpty()) {
        SearchEntry searchEntry = searchQueue.remove();
        StarTreeIndexNodeInterf current = searchEntry.starTreeIndexnode;
        HashSet<String> remainingPredicateColumns = searchEntry.remainingPredicateColumns;
        HashSet<String> remainingGroupByColumns = searchEntry.remainingGroupByColumns;
        // Check if it's a leaf, or if there are no remaining predicate/group-by columns and the node has a valid aggregated docId
        if (current.isLeaf() || (remainingPredicateColumns.isEmpty() && remainingGroupByColumns.isEmpty()) && isValidAggregatedDocId(current.getAggregatedDocumentId())) {
            // reached leaf
            matchedEntries.add(searchEntry);
            continue;
        }
        // Find next set of nodes to search
        String nextDimension = dimensionIndexToNameMapping.inverse().get(current.getChildDimensionName());
        HashSet<String> newRemainingPredicateColumns = new HashSet<>();
        newRemainingPredicateColumns.addAll(remainingPredicateColumns);
        HashSet<String> newRemainingGroupByColumns = new HashSet<>();
        newRemainingGroupByColumns.addAll(remainingGroupByColumns);
        addMatchingChildrenToQueue(searchQueue, current, nextDimension, newRemainingPredicateColumns, newRemainingGroupByColumns);
    }
    return matchedEntries;
}
Also used : StarTreeIndexNodeInterf(com.linkedin.pinot.core.startree.StarTreeIndexNodeInterf) LinkedList(java.util.LinkedList) HashSet(java.util.HashSet)
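
The traversal above is a standard breadth-first search with a LinkedList used through the Queue interface. The stripped-down sketch below shows the same pattern on a made-up Node type; the real StarTreeIndexNodeInterf and the predicate/group-by bookkeeping are intentionally omitted.

import java.util.LinkedList;
import java.util.List;
import java.util.Queue;

final class TreeBfs {

    // Minimal stand-in node type; the real Pinot star-tree node interface looks different.
    static final class Node {
        final String name;
        final List<Node> children;

        Node(String name, List<Node> children) {
            this.name = name;
            this.children = children;
        }

        boolean isLeaf() {
            return children.isEmpty();
        }
    }

    // Collects all leaf nodes, visiting the tree breadth-first with a LinkedList-backed Queue.
    static List<Node> findLeaves(Node root) {
        List<Node> leaves = new LinkedList<>();
        Queue<Node> searchQueue = new LinkedList<>();
        searchQueue.add(root);
        while (!searchQueue.isEmpty()) {
            Node current = searchQueue.remove();
            if (current.isLeaf()) {
                leaves.add(current);
                continue;
            }
            searchQueue.addAll(current.children);
        }
        return leaves;
    }
}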

Example 49 with LinkedList

Use of java.util.LinkedList in project pinot by linkedin.

From the class RealtimeDimensionsSerDe, method serialize.

public ByteBuffer serialize(GenericRow row) {
    List<Integer> rowConvertedToDictionaryId = new LinkedList<Integer>();
    List<Integer> columnOffsets = new LinkedList<Integer>();
    int pointer = 0;
    for (int i = 0; i < dataSchema.getDimensionNames().size(); i++) {
        columnOffsets.add(pointer);
        if (dataSchema.getFieldSpecFor(dataSchema.getDimensionNames().get(i)).isSingleValueField()) {
            rowConvertedToDictionaryId.add(dictionaryMap.get(dataSchema.getDimensionNames().get(i)).indexOf(row.getValue(dataSchema.getDimensionNames().get(i))));
            pointer += 1;
        } else {
            Object[] multivalues = (Object[]) row.getValue(dataSchema.getDimensionNames().get(i));
            if (multivalues != null && multivalues.length > 0) {
                Arrays.sort(multivalues);
                for (Object multivalue : multivalues) {
                    rowConvertedToDictionaryId.add(dictionaryMap.get(dataSchema.getDimensionNames().get(i)).indexOf(multivalue));
                }
                pointer += multivalues.length;
            } else {
                rowConvertedToDictionaryId.add(0);
                pointer += 1;
            }
        }
        if (i == dataSchema.getDimensionNames().size() - 1) {
            columnOffsets.add(pointer);
        }
    }
    ByteBuffer buff = ByteBuffer.allocate((columnOffsets.size() + rowConvertedToDictionaryId.size()) * 4);
    for (Integer offset : columnOffsets) {
        buff.putInt(offset + columnOffsets.size());
    }
    for (Integer dicId : rowConvertedToDictionaryId) {
        buff.putInt(dicId);
    }
    return buff;
}
Also used : ByteBuffer(java.nio.ByteBuffer) LinkedList(java.util.LinkedList)
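
The buffer written above has a flat layout: a header of numColumns + 1 int offsets, each already shifted by the header length so it indexes directly into the combined int array, followed by the dictionary ids of every column in order. A reader can therefore slice out one column's ids with absolute gets. The sketch below is illustrative only; the class name DimensionBufferReader is invented here, and the real decoding code in RealtimeDimensionsSerDe may differ.

import java.nio.ByteBuffer;

final class DimensionBufferReader {

    // Returns the dictionary ids serialized for the given column (0-based).
    // Offsets and ids are 4-byte ints; stored offsets are absolute int indexes into the buffer.
    static int[] dictionaryIdsForColumn(ByteBuffer buff, int column) {
        int start = buff.getInt(column * 4);
        int end = buff.getInt((column + 1) * 4);
        int[] ids = new int[end - start];
        for (int i = 0; i < ids.length; i++) {
            ids[i] = buff.getInt((start + i) * 4);
        }
        return ids;
    }
}

The returned array has one entry per value of that column in the row: length one for a single-value field, and one id per value for a multi-value field.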

Example 50 with LinkedList

Use of java.util.LinkedList in project head by mifos.

From the class HolidayServiceFacadeWebTier, method holidaysByYear.

@Override
public Map<String, List<OfficeHoliday>> holidaysByYear() {
    List<HolidayBO> holidays = this.holidayDao.findAllHolidays();
    Map<String, List<OfficeHoliday>> holidaysByYear = new TreeMap<String, List<OfficeHoliday>>();
    for (HolidayBO holiday : holidays) {
        HolidayDetails holidayDetail = new HolidayDetails(holiday.getHolidayName(), holiday.getHolidayFromDate(), holiday.getHolidayThruDate(), holiday.getRepaymentRuleType().getValue());
        String holidayRepaymentRuleName = ApplicationContextProvider.getBean(MessageLookup.class).lookup(holiday.getRepaymentRuleType().getPropertiesKey());
        holidayDetail.setRepaymentRuleName(holidayRepaymentRuleName);
        int year = holiday.getThruDate().getYear();
        List<OfficeHoliday> holidaysInYear = holidaysByYear.get(Integer.toString(year));
        if (holidaysInYear == null) {
            holidaysInYear = new LinkedList<OfficeHoliday>();
        }
        holidaysInYear.add(new OfficeHoliday(holidayDetail, this.holidayDao.applicableOffices(holiday.getId())));
        holidaysByYear.put(Integer.toString(year), holidaysInYear);
    }
    sortValuesByFromDate(holidaysByYear);
    return holidaysByYear;
}
Also used : HolidayDetails(org.mifos.dto.domain.HolidayDetails) MessageLookup(org.mifos.application.master.MessageLookup) ArrayList(java.util.ArrayList) LinkedList(java.util.LinkedList) List(java.util.List) HolidayBO(org.mifos.application.holiday.business.HolidayBO) TreeMap(java.util.TreeMap) OfficeHoliday(org.mifos.dto.domain.OfficeHoliday)
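
The get-then-put-back pattern used to grow the per-year lists can be written more compactly with Map.computeIfAbsent (Java 8+). A minimal sketch under simplified assumptions: plain String keys and values stand in for the year strings and OfficeHoliday DTOs of the real method.

import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

final class GroupingExample {

    // Groups (year, name) pairs by year, creating each per-year LinkedList lazily.
    static Map<String, List<String>> groupByYear(List<String[]> yearAndName) {
        Map<String, List<String>> byYear = new TreeMap<>();
        for (String[] pair : yearAndName) {
            byYear.computeIfAbsent(pair[0], year -> new LinkedList<>()).add(pair[1]);
        }
        return byYear;
    }
}

The TreeMap keeps the year keys sorted, matching the sorted map used by the method above.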

Aggregations

LinkedList (java.util.LinkedList) 10512
Test (org.junit.Test) 1487
List (java.util.List) 1463
HashMap (java.util.HashMap) 1371
ArrayList (java.util.ArrayList) 1313
Map (java.util.Map) 871
IOException (java.io.IOException) 800
File (java.io.File) 695
HashSet (java.util.HashSet) 605
LinkedHashMap (java.util.LinkedHashMap) 382
GenericValue (org.apache.ofbiz.entity.GenericValue) 296
Iterator (java.util.Iterator) 277
Set (java.util.Set) 255
GenericEntityException (org.apache.ofbiz.entity.GenericEntityException) 232
Date (java.util.Date) 222
Collection (java.util.Collection) 201
Delegator (org.apache.ofbiz.entity.Delegator) 162
Locale (java.util.Locale) 158
URL (java.net.URL) 154
BufferedReader (java.io.BufferedReader) 146