
Example 21 with SchemaNotFoundException

Use of org.apache.nifi.schema.access.SchemaNotFoundException in project nifi by apache.

The class PublisherLease, method publish.

void publish(final FlowFile flowFile, final RecordSet recordSet, final RecordSetWriterFactory writerFactory, final RecordSchema schema, final String messageKeyField, final String topic) throws IOException {
    if (tracker == null) {
        tracker = new InFlightMessageTracker(logger);
    }
    final ByteArrayOutputStream baos = new ByteArrayOutputStream(1024);
    Record record;
    int recordCount = 0;
    try {
        while ((record = recordSet.next()) != null) {
            recordCount++;
            baos.reset();
            Map<String, String> additionalAttributes = Collections.emptyMap();
            try (final RecordSetWriter writer = writerFactory.createWriter(logger, schema, baos)) {
                final WriteResult writeResult = writer.write(record);
                additionalAttributes = writeResult.getAttributes();
                writer.flush();
            }
            final byte[] messageContent = baos.toByteArray();
            final String key = messageKeyField == null ? null : record.getAsString(messageKeyField);
            final byte[] messageKey = (key == null) ? null : key.getBytes(StandardCharsets.UTF_8);
            publish(flowFile, additionalAttributes, messageKey, messageContent, topic, tracker);
            if (tracker.isFailed(flowFile)) {
                // If we have a failure, don't try to send anything else.
                return;
            }
        }
        if (recordCount == 0) {
            tracker.trackEmpty(flowFile);
        }
    } catch (final TokenTooLargeException ttle) {
        tracker.fail(flowFile, ttle);
    } catch (final SchemaNotFoundException snfe) {
        throw new IOException(snfe);
    } catch (final Exception e) {
        tracker.fail(flowFile, e);
        poison();
        throw e;
    }
}
Also used : ByteArrayOutputStream(java.io.ByteArrayOutputStream) IOException(java.io.IOException) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) TimeoutException(java.util.concurrent.TimeoutException) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException) TokenTooLargeException(org.apache.nifi.stream.io.exception.TokenTooLargeException) WriteResult(org.apache.nifi.serialization.WriteResult) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Record(org.apache.nifi.serialization.record.Record)
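The pattern worth noting in this example is that publish() declares only IOException, so the checked SchemaNotFoundException raised while creating the writer is wrapped and rethrown as IOException, while each record is serialized into one reusable buffer. Below is a minimal, self-contained sketch of that shape; the RecordWriter and WriterFactory interfaces, the nested SchemaNotFoundException class, and the send() call are hypothetical stand-ins, not the NiFi API.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Iterator;

public class PerRecordPublisher {

    // Local stand-in for org.apache.nifi.schema.access.SchemaNotFoundException.
    static class SchemaNotFoundException extends Exception {
        SchemaNotFoundException(String message) {
            super(message);
        }
    }

    interface RecordWriter extends AutoCloseable {
        void write(String record) throws IOException;

        @Override
        void close() throws IOException;
    }

    interface WriterFactory {
        // May fail when no schema can be resolved for the outgoing records.
        RecordWriter create(ByteArrayOutputStream target) throws SchemaNotFoundException;
    }

    void publish(Iterator<String> records, WriterFactory factory) throws IOException {
        final ByteArrayOutputStream buffer = new ByteArrayOutputStream(1024);
        while (records.hasNext()) {
            buffer.reset(); // reuse one buffer for every record
            try (RecordWriter writer = factory.create(buffer)) {
                writer.write(records.next());
            } catch (SchemaNotFoundException snfe) {
                // The method only declares IOException, so the checked schema
                // failure is wrapped, just as the NiFi code above does.
                throw new IOException(snfe);
            }
            send(buffer.toByteArray()); // hypothetical transport call
        }
    }

    private void send(byte[] message) {
        // transport intentionally omitted in this sketch
    }
}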

Example 22 with SchemaNotFoundException

Use of org.apache.nifi.schema.access.SchemaNotFoundException in project nifi by apache.

The class PutMongoRecord, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final RecordReaderFactory recordParserFactory = context.getProperty(RECORD_READER_FACTORY).asControllerService(RecordReaderFactory.class);
    final WriteConcern writeConcern = getWriteConcern(context);
    final MongoCollection<Document> collection = getCollection(context).withWriteConcern(writeConcern);
    List<Document> inserts = new ArrayList<>();
    int ceiling = context.getProperty(INSERT_COUNT).asInteger();
    int added = 0;
    boolean error = false;
    try (final InputStream inStream = session.read(flowFile);
        final RecordReader reader = recordParserFactory.createRecordReader(flowFile, inStream, getLogger())) {
        RecordSchema schema = reader.getSchema();
        Record record;
        while ((record = reader.nextRecord()) != null) {
            // Convert each Record to HashMap and put into the Mongo document
            Map<String, Object> contentMap = (Map<String, Object>) DataTypeUtils.convertRecordFieldtoObject(record, RecordFieldType.RECORD.getRecordDataType(record.getSchema()));
            Document document = new Document();
            for (String name : schema.getFieldNames()) {
                document.put(name, contentMap.get(name));
            }
            inserts.add(document);
            if (inserts.size() == ceiling) {
                collection.insertMany(inserts);
                added += inserts.size();
                inserts = new ArrayList<>();
            }
        }
        if (inserts.size() > 0) {
            collection.insertMany(inserts);
        }
    } catch (SchemaNotFoundException | IOException | MalformedRecordException e) {
        getLogger().error("PutMongoRecord failed with error:", e);
        session.transfer(flowFile, REL_FAILURE);
        error = true;
    } finally {
        if (!error) {
            session.getProvenanceReporter().send(flowFile, context.getProperty(URI).evaluateAttributeExpressions().getValue(), String.format("Added %d documents to MongoDB.", added));
            session.transfer(flowFile, REL_SUCCESS);
            getLogger().info("Inserted {} records into MongoDB", new Object[] { added });
        }
    }
    session.commit();
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) InputStream(java.io.InputStream) RecordReader(org.apache.nifi.serialization.RecordReader) ArrayList(java.util.ArrayList) IOException(java.io.IOException) Document(org.bson.Document) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) WriteConcern(com.mongodb.WriteConcern) Record(org.apache.nifi.serialization.record.Record) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) Map(java.util.Map)
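Two details in this example are easy to miss: documents are flushed to MongoDB whenever the batch reaches the configured Insert Count ceiling, and any remainder is flushed after the loop. Note, though, that the snippet only increments added on the full-batch flushes, so the final partial insert does not appear to be reflected in the provenance message. Below is a minimal sketch of the same batch-and-flush shape, with the final flush counted as well; BatchSink is a hypothetical stand-in for MongoCollection.insertMany.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class BatchInserter {

    // Hypothetical stand-in for a bulk-insert target such as MongoCollection.
    interface BatchSink<T> {
        void insertMany(List<T> batch);
    }

    /** Inserts all items in batches of at most `ceiling` and returns the total written. */
    static <T> int insertAll(Iterator<T> items, BatchSink<T> sink, int ceiling) {
        List<T> batch = new ArrayList<>();
        int added = 0;
        while (items.hasNext()) {
            batch.add(items.next());
            if (batch.size() == ceiling) {
                sink.insertMany(batch);    // flush a full batch
                added += batch.size();
                batch = new ArrayList<>(); // start a fresh batch
            }
        }
        if (!batch.isEmpty()) {
            sink.insertMany(batch);        // flush the final partial batch
            added += batch.size();
        }
        return added;
    }
}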

Example 23 with SchemaNotFoundException

Use of org.apache.nifi.schema.access.SchemaNotFoundException in project nifi by apache.

The class PublishKafkaRecord_0_11, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final List<FlowFile> flowFiles = session.get(FlowFileFilters.newSizeBasedFilter(1, DataUnit.MB, 500));
    if (flowFiles.isEmpty()) {
        return;
    }
    final PublisherPool pool = getPublisherPool(context);
    if (pool == null) {
        context.yield();
        return;
    }
    final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
    final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue();
    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    final boolean useTransactions = context.getProperty(USE_TRANSACTIONS).asBoolean();
    final long startTime = System.nanoTime();
    try (final PublisherLease lease = pool.obtainPublisher()) {
        if (useTransactions) {
            lease.beginTransaction();
        }
        // Send each FlowFile to Kafka asynchronously.
        final Iterator<FlowFile> itr = flowFiles.iterator();
        while (itr.hasNext()) {
            final FlowFile flowFile = itr.next();
            if (!isScheduled()) {
                // If stopped, re-queue FlowFile instead of sending it
                if (useTransactions) {
                    session.rollback();
                    lease.rollback();
                    return;
                }
                session.transfer(flowFile);
                itr.remove();
                continue;
            }
            final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions(flowFile).getValue();
            final String messageKeyField = context.getProperty(MESSAGE_KEY_FIELD).evaluateAttributeExpressions(flowFile).getValue();
            try {
                session.read(flowFile, new InputStreamCallback() {

                    @Override
                    public void process(final InputStream rawIn) throws IOException {
                        try (final InputStream in = new BufferedInputStream(rawIn)) {
                            final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger());
                            final RecordSet recordSet = reader.createRecordSet();
                            final RecordSchema schema = writerFactory.getSchema(flowFile.getAttributes(), recordSet.getSchema());
                            lease.publish(flowFile, recordSet, writerFactory, schema, messageKeyField, topic);
                        } catch (final SchemaNotFoundException | MalformedRecordException e) {
                            throw new ProcessException(e);
                        }
                    }
                });
            } catch (final Exception e) {
                // The FlowFile will be obtained and the error logged below, when calling publishResult.getFailedFlowFiles()
                lease.fail(flowFile, e);
                continue;
            }
        }
        // Complete the send
        final PublishResult publishResult = lease.complete();
        if (publishResult.isFailure()) {
            getLogger().info("Failed to send FlowFile to kafka; transferring to failure");
            session.transfer(flowFiles, REL_FAILURE);
            return;
        }
        // Transfer any successful FlowFiles.
        final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime);
        for (FlowFile success : flowFiles) {
            final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions(success).getValue();
            final int msgCount = publishResult.getSuccessfulMessageCount(success);
            success = session.putAttribute(success, MSG_COUNT, String.valueOf(msgCount));
            session.adjustCounter("Messages Sent", msgCount, true);
            final String transitUri = KafkaProcessorUtils.buildTransitURI(securityProtocol, bootstrapServers, topic);
            session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis);
            session.transfer(success, REL_SUCCESS);
        }
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) BufferedInputStream(java.io.BufferedInputStream) InputStream(java.io.InputStream) RecordReader(org.apache.nifi.serialization.RecordReader) IOException(java.io.IOException) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException) ProcessException(org.apache.nifi.processor.exception.ProcessException) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback) RecordSet(org.apache.nifi.serialization.record.RecordSet) RecordSchema(org.apache.nifi.serialization.record.RecordSchema)
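The InputStreamCallback here may only throw IOException, so the checked SchemaNotFoundException and MalformedRecordException are wrapped in the unchecked ProcessException to cross the callback boundary; the outer catch then marks just that FlowFile as failed and moves on to the next one. A minimal sketch of that translate-and-continue shape follows; the StreamCallback, ParseException, and ProcessingException types are hypothetical stand-ins rather than the NiFi classes.

import java.io.IOException;
import java.io.InputStream;
import java.util.List;

public class CallbackExceptionTranslation {

    interface StreamCallback {
        void process(InputStream in) throws IOException;
    }

    // Stand-in for a checked parsing failure such as MalformedRecordException.
    static class ParseException extends Exception {
        ParseException(String message) {
            super(message);
        }
    }

    // Stand-in for an unchecked wrapper such as ProcessException.
    static class ProcessingException extends RuntimeException {
        ProcessingException(Throwable cause) {
            super(cause);
        }
    }

    static void parse(InputStream in) throws ParseException, IOException {
        // record parsing intentionally omitted in this sketch
    }

    void handleAll(List<InputStream> inputs) {
        for (InputStream in : inputs) {
            try {
                StreamCallback callback = stream -> {
                    try {
                        parse(stream);
                    } catch (ParseException pe) {
                        // The callback may only throw IOException, so wrap the
                        // checked failure in an unchecked exception.
                        throw new ProcessingException(pe);
                    }
                };
                callback.process(in);
            } catch (Exception e) {
                // Record the failure for this input only and keep processing
                // the rest, as the processor does with lease.fail(...).
            }
        }
    }
}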

Example 24 with SchemaNotFoundException

Use of org.apache.nifi.schema.access.SchemaNotFoundException in project nifi by apache.

The class QueryRecord, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final FlowFile original = session.get();
    if (original == null) {
        return;
    }
    final StopWatch stopWatch = new StopWatch(true);
    final RecordSetWriterFactory recordSetWriterFactory = context.getProperty(RECORD_WRITER_FACTORY).asControllerService(RecordSetWriterFactory.class);
    final RecordReaderFactory recordReaderFactory = context.getProperty(RECORD_READER_FACTORY).asControllerService(RecordReaderFactory.class);
    final Map<FlowFile, Relationship> transformedFlowFiles = new HashMap<>();
    final Set<FlowFile> createdFlowFiles = new HashSet<>();
    // Determine the Record Reader's schema
    final RecordSchema readerSchema;
    try (final InputStream rawIn = session.read(original)) {
        final Map<String, String> originalAttributes = original.getAttributes();
        final RecordReader reader = recordReaderFactory.createRecordReader(originalAttributes, rawIn, getLogger());
        final RecordSchema inputSchema = reader.getSchema();
        readerSchema = recordSetWriterFactory.getSchema(originalAttributes, inputSchema);
    } catch (final Exception e) {
        getLogger().error("Failed to determine Record Schema from {}; routing to failure", new Object[] { original, e });
        session.transfer(original, REL_FAILURE);
        return;
    }
    // Determine the schema for writing the data
    final Map<String, String> originalAttributes = original.getAttributes();
    int recordsRead = 0;
    try {
        for (final PropertyDescriptor descriptor : context.getProperties().keySet()) {
            if (!descriptor.isDynamic()) {
                continue;
            }
            final Relationship relationship = new Relationship.Builder().name(descriptor.getName()).build();
            // We have to fork a child because we may need to read the input FlowFile more than once,
            // and we cannot call session.read() on the original FlowFile while we are within a write
            // callback for the original FlowFile.
            FlowFile transformed = session.create(original);
            boolean flowFileRemoved = false;
            try {
                final String sql = context.getProperty(descriptor).evaluateAttributeExpressions(original).getValue();
                final AtomicReference<WriteResult> writeResultRef = new AtomicReference<>();
                final QueryResult queryResult;
                if (context.getProperty(CACHE_SCHEMA).asBoolean()) {
                    queryResult = queryWithCache(session, original, sql, context, recordReaderFactory);
                } else {
                    queryResult = query(session, original, sql, context, recordReaderFactory);
                }
                final AtomicReference<String> mimeTypeRef = new AtomicReference<>();
                try {
                    final ResultSet rs = queryResult.getResultSet();
                    transformed = session.write(transformed, new OutputStreamCallback() {

                        @Override
                        public void process(final OutputStream out) throws IOException {
                            final ResultSetRecordSet recordSet;
                            final RecordSchema writeSchema;
                            try {
                                recordSet = new ResultSetRecordSet(rs, readerSchema);
                                final RecordSchema resultSetSchema = recordSet.getSchema();
                                writeSchema = recordSetWriterFactory.getSchema(originalAttributes, resultSetSchema);
                            } catch (final SQLException | SchemaNotFoundException e) {
                                throw new ProcessException(e);
                            }
                            try (final RecordSetWriter resultSetWriter = recordSetWriterFactory.createWriter(getLogger(), writeSchema, out)) {
                                writeResultRef.set(resultSetWriter.write(recordSet));
                                mimeTypeRef.set(resultSetWriter.getMimeType());
                            } catch (final Exception e) {
                                throw new IOException(e);
                            }
                        }
                    });
                } finally {
                    closeQuietly(queryResult);
                }
                recordsRead = Math.max(recordsRead, queryResult.getRecordsRead());
                final WriteResult result = writeResultRef.get();
                if (result.getRecordCount() == 0 && !context.getProperty(INCLUDE_ZERO_RECORD_FLOWFILES).asBoolean()) {
                    session.remove(transformed);
                    flowFileRemoved = true;
                    transformedFlowFiles.remove(transformed);
                    getLogger().info("Transformed {} but the result contained no data so will not pass on a FlowFile", new Object[] { original });
                } else {
                    final Map<String, String> attributesToAdd = new HashMap<>();
                    if (result.getAttributes() != null) {
                        attributesToAdd.putAll(result.getAttributes());
                    }
                    attributesToAdd.put(CoreAttributes.MIME_TYPE.key(), mimeTypeRef.get());
                    attributesToAdd.put("record.count", String.valueOf(result.getRecordCount()));
                    transformed = session.putAllAttributes(transformed, attributesToAdd);
                    transformedFlowFiles.put(transformed, relationship);
                    session.adjustCounter("Records Written", result.getRecordCount(), false);
                }
            } finally {
                // Ensure that we have the FlowFile in the set in case we throw any Exception
                if (!flowFileRemoved) {
                    createdFlowFiles.add(transformed);
                }
            }
        }
        final long elapsedMillis = stopWatch.getElapsed(TimeUnit.MILLISECONDS);
        if (transformedFlowFiles.size() > 0) {
            session.getProvenanceReporter().fork(original, transformedFlowFiles.keySet(), elapsedMillis);
            for (final Map.Entry<FlowFile, Relationship> entry : transformedFlowFiles.entrySet()) {
                final FlowFile transformed = entry.getKey();
                final Relationship relationship = entry.getValue();
                session.getProvenanceReporter().route(transformed, relationship);
                session.transfer(transformed, relationship);
            }
        }
        getLogger().info("Successfully queried {} in {} millis", new Object[] { original, elapsedMillis });
        session.transfer(original, REL_ORIGINAL);
    } catch (final SQLException e) {
        getLogger().error("Unable to query {} due to {}", new Object[] { original, e.getCause() == null ? e : e.getCause() });
        session.remove(createdFlowFiles);
        session.transfer(original, REL_FAILURE);
    } catch (final Exception e) {
        getLogger().error("Unable to query {} due to {}", new Object[] { original, e });
        session.remove(createdFlowFiles);
        session.transfer(original, REL_FAILURE);
    }
    session.adjustCounter("Records Read", recordsRead, false);
}
Also used : HashMap(java.util.HashMap) SQLException(java.sql.SQLException) RecordReader(org.apache.nifi.serialization.RecordReader) OutputStream(java.io.OutputStream) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) ResultSet(java.sql.ResultSet) OutputStreamCallback(org.apache.nifi.processor.io.OutputStreamCallback) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) HashSet(java.util.HashSet) FlowFile(org.apache.nifi.flowfile.FlowFile) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) InputStream(java.io.InputStream) AtomicReference(java.util.concurrent.atomic.AtomicReference) IOException(java.io.IOException) ResultSetRecordSet(org.apache.nifi.serialization.record.ResultSetRecordSet) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException) ProcessException(org.apache.nifi.processor.exception.ProcessException) StopWatch(org.apache.nifi.util.StopWatch) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) WriteResult(org.apache.nifi.serialization.WriteResult) Relationship(org.apache.nifi.processor.Relationship) DynamicRelationship(org.apache.nifi.annotation.behavior.DynamicRelationship) Map(java.util.Map)
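Because the write happens inside an OutputStreamCallback, values computed there (the WriteResult and the writer's MIME type) cannot be returned directly, so the processor hands them out through AtomicReference holders and reads them after session.write() returns. A minimal sketch of that capture pattern, assuming hypothetical Session and OutputCallback interfaces in place of the NiFi types:

import java.io.IOException;
import java.io.OutputStream;
import java.util.concurrent.atomic.AtomicReference;

public class CallbackResultCapture {

    interface OutputCallback {
        void process(OutputStream out) throws IOException;
    }

    // Hypothetical stand-in for a session that writes through a callback.
    interface Session {
        void write(OutputCallback callback) throws IOException;
    }

    static int writeAndCount(Session session) throws IOException {
        final AtomicReference<Integer> recordCountRef = new AtomicReference<>();
        final AtomicReference<String> mimeTypeRef = new AtomicReference<>();

        session.write(out -> {
            int written = 0;
            // ... serialize records to `out`, incrementing `written` ...
            recordCountRef.set(written);         // hand results out of the callback
            mimeTypeRef.set("application/json"); // e.g. the writer's MIME type
        });

        // Values set inside the callback are visible here, after write() returns.
        System.out.println("wrote " + recordCountRef.get() + " records as " + mimeTypeRef.get());
        return recordCountRef.get();
    }
}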

Example 25 with SchemaNotFoundException

Use of org.apache.nifi.schema.access.SchemaNotFoundException in project nifi by apache.

The class TestConsumeAzureEventHub, method setupRecordReader.

private void setupRecordReader(List<EventData> eventDataList, int throwExceptionAt, String writeFailureWith) throws MalformedRecordException, IOException, SchemaNotFoundException {
    final RecordReaderFactory readerFactory = mock(RecordReaderFactory.class);
    processor.setReaderFactory(readerFactory);
    final RecordReader reader = mock(RecordReader.class);
    when(readerFactory.createRecordReader(anyMap(), any(), any())).thenReturn(reader);
    final List<Record> recordList = eventDataList.stream().map(eventData -> toRecord(new String(eventData.getBytes()))).collect(Collectors.toList());
    // Add null to indicate the end of records.
    final Function<List<Record>, List<Record>> addEndRecord = rs -> rs.stream().flatMap(r -> r.getAsString("value").equals(writeFailureWith) ? Stream.of(r) : Stream.of(r, null)).collect(Collectors.toList());
    final List<Record> recordSetList = addEndRecord.apply(recordList);
    final Record[] records = recordSetList.toArray(new Record[recordSetList.size()]);
    switch(throwExceptionAt) {
        case -1:
            when(reader.nextRecord()).thenReturn(records[0], Arrays.copyOfRange(records, 1, records.length));
            break;
        case 0:
            when(reader.nextRecord()).thenThrow(new MalformedRecordException("Simulating Record parse failure.")).thenReturn(records[0], Arrays.copyOfRange(records, 1, records.length));
            break;
        default:
            final List<Record> recordList1 = addEndRecord.apply(recordList.subList(0, throwExceptionAt));
            final List<Record> recordList2 = addEndRecord.apply(recordList.subList(throwExceptionAt + 1, recordList.size()));
            final Record[] records1 = recordList1.toArray(new Record[recordList1.size()]);
            final Record[] records2 = recordList2.toArray(new Record[recordList2.size()]);
            when(reader.nextRecord()).thenReturn(records1[0], Arrays.copyOfRange(records1, 1, records1.length)).thenThrow(new MalformedRecordException("Simulating Record parse failure.")).thenReturn(records2[0], Arrays.copyOfRange(records2, 1, records2.length));
    }
}
Also used : Arrays(java.util.Arrays) HashMap(java.util.HashMap) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException) AtomicReference(java.util.concurrent.atomic.AtomicReference) Function(java.util.function.Function) Mockito.doThrow(org.mockito.Mockito.doThrow) RecordReader(org.apache.nifi.serialization.RecordReader) ProvenanceEventRecord(org.apache.nifi.provenance.ProvenanceEventRecord) Map(java.util.Map) Record(org.apache.nifi.serialization.record.Record) MapRecord(org.apache.nifi.serialization.record.MapRecord) Before(org.junit.Before) OutputStream(java.io.OutputStream) MockProcessSession(org.apache.nifi.util.MockProcessSession) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) ProvenanceEventType(org.apache.nifi.provenance.ProvenanceEventType) RecordField(org.apache.nifi.serialization.record.RecordField) WriteResult(org.apache.nifi.serialization.WriteResult) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema) Test(org.junit.Test) IOException(java.io.IOException) Mockito.when(org.mockito.Mockito.when) EventData(com.microsoft.azure.eventhubs.EventData) ProcessSessionFactory(org.apache.nifi.processor.ProcessSessionFactory) Collectors(java.util.stream.Collectors) StandardCharsets(java.nio.charset.StandardCharsets) PartitionContext(com.microsoft.azure.eventprocessorhost.PartitionContext) Matchers.any(org.mockito.Matchers.any) Mockito(org.mockito.Mockito) AtomicLong(java.util.concurrent.atomic.AtomicLong) List(java.util.List) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) Stream(java.util.stream.Stream) Matchers.anyMap(org.mockito.Matchers.anyMap) SharedSessionState(org.apache.nifi.util.SharedSessionState) MockComponentLog(org.apache.nifi.util.MockComponentLog) ProcessorInitializationContext(org.apache.nifi.processor.ProcessorInitializationContext) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) RecordFieldType(org.apache.nifi.serialization.record.RecordFieldType) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) MockFlowFile(org.apache.nifi.util.MockFlowFile) Mockito.mock(org.mockito.Mockito.mock)
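The useful trick in this test helper is how the mocked reader is stubbed: nextRecord() is given a run of records terminated by null (the reader's end-of-stream signal), and a parse failure can be injected part-way through by chaining thenReturn, thenThrow, and thenReturn again. A compact sketch of that stubbing approach, assuming Mockito on the classpath and a hypothetical RecordSource interface in place of NiFi's RecordReader:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Arrays;

public class RecordReaderStubbing {

    // Hypothetical stand-in for NiFi's RecordReader.
    interface RecordSource {
        String nextRecord() throws Exception;
    }

    /** Stubs a reader; failAt of -1 means no failure, otherwise 0 <= failAt < records.length. */
    static RecordSource stubWithFailureAt(String[] records, int failAt) throws Exception {
        RecordSource reader = mock(RecordSource.class);
        if (failAt < 0) {
            // Happy path: return every record, then null to end the stream.
            String[] sequence = withTerminator(records, 0, records.length);
            when(reader.nextRecord())
                    .thenReturn(sequence[0], Arrays.copyOfRange(sequence, 1, sequence.length));
        } else {
            // Return the records before the failure, throw once, then the remainder.
            String[] before = withTerminator(records, 0, failAt);
            String[] after = withTerminator(records, failAt + 1, records.length);
            when(reader.nextRecord())
                    .thenReturn(before[0], Arrays.copyOfRange(before, 1, before.length))
                    .thenThrow(new RuntimeException("simulated record parse failure"))
                    .thenReturn(after[0], Arrays.copyOfRange(after, 1, after.length));
        }
        return reader;
    }

    // Copies records[from, to) and appends one extra slot, which stays null
    // and acts as the end-of-records marker.
    private static String[] withTerminator(String[] records, int from, int to) {
        String[] slice = Arrays.copyOfRange(records, from, to);
        return Arrays.copyOf(slice, slice.length + 1);
    }
}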

Aggregations

SchemaNotFoundException (org.apache.nifi.schema.access.SchemaNotFoundException): 26
IOException (java.io.IOException): 19
RecordSchema (org.apache.nifi.serialization.record.RecordSchema): 19
MalformedRecordException (org.apache.nifi.serialization.MalformedRecordException): 13
InputStream (java.io.InputStream): 12
RecordReader (org.apache.nifi.serialization.RecordReader): 12
RecordReaderFactory (org.apache.nifi.serialization.RecordReaderFactory): 12
FlowFile (org.apache.nifi.flowfile.FlowFile): 11
ProcessException (org.apache.nifi.processor.exception.ProcessException): 11
RecordSetWriterFactory (org.apache.nifi.serialization.RecordSetWriterFactory): 10
Record (org.apache.nifi.serialization.record.Record): 9
RecordSetWriter (org.apache.nifi.serialization.RecordSetWriter): 8
OutputStream (java.io.OutputStream): 7
Map (java.util.Map): 7
HashMap (java.util.HashMap): 6
WriteResult (org.apache.nifi.serialization.WriteResult): 5
JsonNode (com.fasterxml.jackson.databind.JsonNode): 4
PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor): 4
BufferedInputStream (java.io.BufferedInputStream): 3
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 3