Search in sources:

Example 96 with Logger

use of org.slf4j.Logger in project cuba by cuba-platform.

From the class BulkEditorWindow, method commitChanges.

/**
 * Applies the bulk-edit values entered in this window to every selected entity
 * and commits them through the data supplier.
 * <p>
 * For each editable field: if the field is disabled, the corresponding property is
 * cleared (set to {@code null}) on every entity; if the field's value was changed
 * by the user, the new value is written to every entity. The FQNs of all changed
 * fields are collected for the audit log message.
 * <p>
 * Improvement over the previous version: the two identical iterations over
 * {@code dataFields} (one to collect changed-field FQNs, one to apply values) are
 * merged into a single pass, and {@code isFieldChanged} is evaluated once per field.
 * A field that is both changed and disabled is still logged as changed and still
 * cleared, exactly as before.
 */
protected void commitChanges() {
    List<String> fields = new ArrayList<>();
    for (Map.Entry<String, Field> fieldEntry : dataFields.entrySet()) {
        Field field = fieldEntry.getValue();
        boolean changed = isFieldChanged(field);
        if (changed) {
            // Record the fully-qualified property name for the log message below.
            fields.add(managedFields.get(fieldEntry.getKey()).getFqn());
        }
        if (!field.isEnabled()) {
            // Disabled field: clear this property on every selected entity.
            for (Entity item : items) {
                ensureEmbeddedPropertyCreated(item, fieldEntry.getKey());
                item.setValueEx(fieldEntry.getKey(), null);
            }
        } else if (changed) {
            // Changed field: write the new value to every selected entity.
            for (Entity item : items) {
                ensureEmbeddedPropertyCreated(item, fieldEntry.getKey());
                item.setValueEx(fieldEntry.getKey(), field.getValue());
            }
        }
    }
    Set<Entity> committed = dataSupplier.commit(new CommitContext(items));
    // NOTE(review): conventionally the Logger would be a private static final field;
    // kept local here to leave the class's field layout untouched.
    Logger logger = LoggerFactory.getLogger(BulkEditorWindow.class);
    logger.info("Applied bulk editing for {} entries of {}. Changed properties: {}", committed.size(), metaClass, StringUtils.join(fields, ", "));
    showNotification(formatMessage("bulk.successMessage", committed.size()), NotificationType.HUMANIZED);
    close(COMMIT_ACTION_ID);
}
Also used : BaseGenericIdEntity(com.haulmont.cuba.core.entity.BaseGenericIdEntity) Entity(com.haulmont.cuba.core.entity.Entity) Logger(org.slf4j.Logger)

Example 97 with Logger

use of org.slf4j.Logger in project qpid-broker-j by apache.

From the class EmbeddedBrokerPerClassAdminImpl, method setClassQualifiedTestName.

/**
 * Stores the class-qualified test name as a property on the logback
 * {@link LoggerContext}, making it available to
 * {@code LogbackPropertyValueDiscriminator} (presumably for routing log
 * output per test class — confirm against the logback configuration).
 *
 * @param name the class-qualified test name to publish, or {@code null} to clear it
 */
private void setClassQualifiedTestName(final String name) {
    // LOGGER is declared as org.slf4j.Logger; the cast exposes the logback-native API.
    final ch.qos.logback.classic.Logger nativeLogger = (ch.qos.logback.classic.Logger) LOGGER;
    final LoggerContext context = nativeLogger.getLoggerContext();
    context.putProperty(LogbackPropertyValueDiscriminator.CLASS_QUALIFIED_TEST_NAME, name);
}
Also used : Logger(org.slf4j.Logger) LoggerContext(ch.qos.logback.classic.LoggerContext)

Example 98 with Logger

use of org.slf4j.Logger in project nifi by apache.

From the class TestSimpleProcessLogger, method before.

/**
 * Test fixture setup: builds a {@link SimpleProcessLogger} around a mocked
 * {@code ReportingTask}, then uses reflection to replace its private
 * {@code "logger"} field with a Mockito mock so tests can verify logging
 * interactions. All log levels are stubbed as enabled.
 * <p>
 * Fix: the previous catch block called {@code e.printStackTrace()} and
 * {@code fail(e.getMessage())} — {@code getMessage()} can be {@code null}
 * (e.g. for {@code NullPointerException}), producing an uninformative failure.
 * We now fail with the full exception string, which always includes the
 * exception type.
 */
@Before
public void before() {
    task = mock(ReportingTask.class);
    when(task.getIdentifier()).thenReturn("foo");
    when(task.toString()).thenReturn("MyTask");
    componentLog = new SimpleProcessLogger(task.getIdentifier(), task);
    try {
        // Inject the mock into the private backing field; this is brittle against
        // renames of SimpleProcessLogger's "logger" field.
        Field loggerField = componentLog.getClass().getDeclaredField("logger");
        loggerField.setAccessible(true);
        logger = mock(Logger.class);
        when(logger.isDebugEnabled()).thenReturn(true);
        when(logger.isInfoEnabled()).thenReturn(true);
        when(logger.isWarnEnabled()).thenReturn(true);
        when(logger.isErrorEnabled()).thenReturn(true);
        when(logger.isTraceEnabled()).thenReturn(true);
        loggerField.set(componentLog, logger);
    } catch (Exception e) {
        // e.toString() always includes the exception class name, unlike getMessage().
        fail("Failed to inject mock logger into SimpleProcessLogger: " + e);
    }
}
Also used : Field(java.lang.reflect.Field) Logger(org.slf4j.Logger) ReportingTask(org.apache.nifi.reporting.ReportingTask) Before(org.junit.Before)

Example 99 with Logger

use of org.slf4j.Logger in project nifi by apache.

From the class TestPersistentProvenanceRepository, method testModifyIndexWhileSearching.

// Verifies that a Lucene index can be modified (via rollover) while searches are in
// flight: the repository must create a fresh index reader rather than failing or
// returning stale results. Concurrency is orchestrated with two latches and
// artificial delays inside a wrapped IndexManager.
// NOTE(review): the sleeps inside borrowIndexSearcher total 3s/5s against a 10s
// timeout — timing-sensitive on slow CI hosts.
@Test(timeout = 10000)
public void testModifyIndexWhileSearching() throws IOException, InterruptedException, ParseException {
    // Skipped on Windows — presumably due to file-locking differences; confirm
    // against isWindowsEnvironment()'s usage elsewhere in this test class.
    assumeFalse(isWindowsEnvironment());
    final RepositoryConfiguration config = createConfiguration();
    config.setMaxRecordLife(30, TimeUnit.SECONDS);
    config.setMaxStorageCapacity(1024L * 1024L * 10);
    // Short event-file life forces frequent rollovers during the test.
    config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
    config.setMaxEventFileCapacity(1024L * 1024L * 10);
    config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
    // Counted down once per search thread when it has borrowed an IndexSearcher;
    // the main thread awaits this before mutating the index.
    final CountDownLatch obtainIndexSearcherLatch = new CountDownLatch(2);
    repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {

        private CachingIndexManager wrappedManager = null;

        // Create an IndexManager that adds a delay before returning the Index Searcher.
        @Override
        protected synchronized CachingIndexManager getIndexManager() {
            if (wrappedManager == null) {
                final IndexManager mgr = super.getIndexManager();
                final Logger logger = LoggerFactory.getLogger("IndexManager");
                // Decorator: delegates everything to the real manager except
                // borrowIndexSearcher, which sleeps so the main thread can modify
                // the index while searchers are held.
                wrappedManager = new CachingIndexManager() {

                    // Distinguishes the first borrowing thread (3s delay) from the
                    // second (5s delay).
                    final AtomicInteger indexSearcherCount = new AtomicInteger(0);

                    @Override
                    public EventIndexSearcher borrowIndexSearcher(File indexDir) throws IOException {
                        final EventIndexSearcher searcher = mgr.borrowIndexSearcher(indexDir);
                        final int idx = indexSearcherCount.incrementAndGet();
                        // Signal the main thread that this searcher has been obtained.
                        obtainIndexSearcherLatch.countDown();
                        // second thread is still holding the searcher
                        try {
                            if (idx == 1) {
                                Thread.sleep(3000L);
                            } else {
                                Thread.sleep(5000L);
                            }
                        } catch (InterruptedException e) {
                            throw new IOException("Interrupted", e);
                        }
                        logger.info("Releasing index searcher");
                        return searcher;
                    }

                    @Override
                    public EventIndexWriter borrowIndexWriter(File indexingDirectory) throws IOException {
                        return mgr.borrowIndexWriter(indexingDirectory);
                    }

                    @Override
                    public void close() throws IOException {
                        mgr.close();
                    }

                    @Override
                    public boolean removeIndex(File indexDirectory) {
                        // Always reports success regardless of the delegate's result —
                        // acceptable in this test stub.
                        mgr.removeIndex(indexDirectory);
                        return true;
                    }

                    @Override
                    public void returnIndexSearcher(EventIndexSearcher searcher) {
                        mgr.returnIndexSearcher(searcher);
                    }

                    @Override
                    public void returnIndexWriter(EventIndexWriter writer) {
                        mgr.returnIndexWriter(writer);
                    }
                };
            }
            return wrappedManager;
        }
    };
    repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
    // Seed the repository with 10 RECEIVE events sharing searchable attributes.
    final String uuid = "10000000-0000-0000-0000-000000000000";
    final Map<String, String> attributes = new HashMap<>();
    attributes.put("abc", "xyz");
    attributes.put("xyz", "abc");
    attributes.put("filename", "file-" + uuid);
    final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
    builder.setEventTime(System.currentTimeMillis());
    builder.setEventType(ProvenanceEventType.RECEIVE);
    builder.setTransitUri("nifi://unit-test");
    attributes.put("uuid", uuid);
    builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
    builder.setComponentId("1234");
    builder.setComponentType("dummy processor");
    for (int i = 0; i < 10; i++) {
        builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
        attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
        repo.registerEvent(builder.build());
    }
    repo.waitForRollover();
    // Perform a query. This will ensure that an IndexSearcher is created and cached.
    // Wildcard terms exercise filename, component-id, and transit-URI search fields.
    final Query query = new Query(UUID.randomUUID().toString());
    query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.Filename, "file-*"));
    query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "12?4"));
    query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.TransitURI, "nifi://*"));
    query.setMaxResults(100);
    // Run a query in a background thread. When this thread goes to obtain the IndexSearcher, it will have a 5 second delay.
    // That delay will occur as the main thread is updating the index. This should result in the search creating a new Index Reader
    // that can properly query the index.
    final int numThreads = 2;
    final CountDownLatch performSearchLatch = new CountDownLatch(numThreads);
    final Runnable searchRunnable = new Runnable() {

        @Override
        public void run() {
            QueryResult result;
            try {
                result = repo.queryEvents(query, createUser());
            } catch (IOException e) {
                e.printStackTrace();
                Assert.fail(e.toString());
                return;
            }
            System.out.println("Finished search: " + result);
            performSearchLatch.countDown();
        }
    };
    // Kick off the searcher threads
    for (int i = 0; i < numThreads; i++) {
        final Thread searchThread = new Thread(searchRunnable);
        searchThread.start();
    }
    // Wait until we've obtained the Index Searchers before modifying the index.
    obtainIndexSearcherLatch.await();
    // add more events to the repo
    for (int i = 0; i < 10; i++) {
        builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
        attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
        repo.registerEvent(builder.build());
    }
    // Force a rollover to occur. This will modify the index.
    repo.rolloverWithLock(true);
    // Wait for the repository to roll over.
    repo.waitForRollover();
    // Wait for the searches to complete.
    // If the repository mishandled the concurrent modification, the search threads
    // fail via Assert.fail and never count down, tripping the 10s test timeout.
    performSearchLatch.await();
}
Also used : Query(org.apache.nifi.provenance.search.Query) CachingIndexManager(org.apache.nifi.provenance.lucene.CachingIndexManager) HashMap(java.util.HashMap) IOException(java.io.IOException) CountDownLatch(java.util.concurrent.CountDownLatch) Logger(org.slf4j.Logger) IndexManager(org.apache.nifi.provenance.lucene.IndexManager) CachingIndexManager(org.apache.nifi.provenance.lucene.CachingIndexManager) QueryResult(org.apache.nifi.provenance.search.QueryResult) EventIndexSearcher(org.apache.nifi.provenance.index.EventIndexSearcher) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) EventIndexWriter(org.apache.nifi.provenance.index.EventIndexWriter) TestUtil.createFlowFile(org.apache.nifi.provenance.TestUtil.createFlowFile) FlowFile(org.apache.nifi.flowfile.FlowFile) File(java.io.File) Test(org.junit.Test)

Example 100 with Logger

use of org.slf4j.Logger in project nifi by apache.

From the class TestControllerStatusReportingTask, method testProcessorLoggerName.

/**
 * Verifies that the processor-status logger resolved via {@code getLogger}
 * is bound to the expected well-known logger name.
 */
@Test
public void testProcessorLoggerName() throws Exception {
    final String expectedName = "org.apache.nifi.controller.ControllerStatusReportingTask.Processors";
    final Logger processorsLogger = getLogger("processorLogger");
    assertEquals(expectedName, processorsLogger.getName());
}
Also used : Logger(org.slf4j.Logger) Test(org.junit.Test)

Aggregations

Logger (org.slf4j.Logger)1088 Test (org.junit.Test)249 IOException (java.io.IOException)127 ENotificationImpl (org.eclipse.emf.ecore.impl.ENotificationImpl)110 ArrayList (java.util.ArrayList)71 InputStream (java.io.InputStream)64 List (java.util.List)59 File (java.io.File)56 Map (java.util.Map)51 LoggerFactory (org.slf4j.LoggerFactory)46 Test (org.testng.annotations.Test)43 HashMap (java.util.HashMap)39 Properties (java.util.Properties)35 HashSet (java.util.HashSet)31 FileInputStream (java.io.FileInputStream)29 Transfer (org.commonjava.maven.galley.model.Transfer)29 Set (java.util.Set)28 StoreKey (org.commonjava.indy.model.core.StoreKey)28 ArtifactStore (org.commonjava.indy.model.core.ArtifactStore)27 Date (java.util.Date)26