Search in sources :

Example 6 with RestService

Use of io.confluent.kafka.schemaregistry.client.rest.RestService in the project schema-registry by confluentinc.

From the class SchemaRegistryErrorHandlerTest, method setupBasicAuthClient.

/**
 * Replaces the test app's REST client with one whose connect URL embeds
 * {@code user:password@} credentials directly after the scheme separator,
 * and attaches a URL-sourced basic-auth credential provider.
 */
private void setupBasicAuthClient(String user, String password) {
    // Insert "user:password@" immediately after "<protocol>://" (3 = "://".length()).
    int insertAt = getSchemaRegistryProtocol().length() + 3;
    String credentials = user + ":" + password + "@";
    String urlWithCredentials =
        new StringBuilder(restApp.restConnect).insert(insertAt, credentials).toString();
    restApp.restClient = new RestService(urlWithCredentials);
    restApp.restClient.setBasicAuthCredentialProvider(
        BasicAuthCredentialProviderFactory.getBasicAuthCredentialProvider(
            "URL", new HashMap<String, String>()));
}
Also used : BasicAuthCredentialProvider(io.confluent.kafka.schemaregistry.client.security.basicauth.BasicAuthCredentialProvider) RestService(io.confluent.kafka.schemaregistry.client.rest.RestService)

Example 7 with RestService

Use of io.confluent.kafka.schemaregistry.client.rest.RestService in the project schema-registry by confluentinc.

From the class RestApiContextTest, method testContextPaths.

@Test
/**
 * Verifies that context paths ("/contexts/.ctxN") scope subjects and schema ids
 * independently of one another, that "/contexts/." is equivalent to the default
 * context, and that subjects are listed with their qualified context prefixes.
 */
@Test
public void testContextPaths() throws Exception {
    RestService ctx1Client = new RestService(restApp.restConnect + "/contexts/.ctx1");
    RestService ctx2Client = new RestService(restApp.restConnect + "/contexts/.ctx2");
    RestService defaultCtxClient = new RestService(restApp.restConnect + "/contexts/.");
    RestService bareClient = new RestService(restApp.restConnect);
    String subject1 = "testTopic1";
    String subject2 = "testTopic2";
    String subject3 = "testTopic3";
    List<String> schemas1 = TestUtils.getRandomCanonicalAvroString(10);
    List<String> schemas2 = TestUtils.getRandomCanonicalAvroString(5);
    List<String> schemas3 = TestUtils.getRandomCanonicalAvroString(2);
    // test getAllVersions with no existing data
    try {
        ctx1Client.getAllVersions(subject1);
        fail("Getting all versions from non-existing subject1 should fail with " + Errors.SUBJECT_NOT_FOUND_ERROR_CODE + " (subject not found)");
    } catch (RestClientException rce) {
        assertEquals("Should get a 404 status for non-existing subject", Errors.SUBJECT_NOT_FOUND_ERROR_CODE, rce.getErrorCode());
    }
    // Register and verify new schemas in each context; schema ids restart at 1
    // per context, so each batch expects ids/versions 1..n.
    List<Integer> versions1 = registerSchemaBatch(ctx1Client, schemas1, subject1);
    // test re-registering existing schemas
    for (int i = 0; i < schemas1.size(); i++) {
        int expectedId = i + 1;
        String schemaString = schemas1.get(i);
        int foundId = ctx1Client.registerSchema(schemaString, subject1);
        assertEquals("Re-registering an existing schema should return the existing version", expectedId, foundId);
    }
    List<Integer> versions2 = registerSchemaBatch(ctx2Client, schemas2, subject2);
    List<Integer> versions3 = registerSchemaBatch(defaultCtxClient, schemas3, subject3);
    // test getAllVersions with existing data
    assertEquals("Getting all versions from subject1 should match all registered versions", versions1, ctx1Client.getAllVersions(subject1));
    assertEquals("Getting all versions from subject2 should match all registered versions", versions2, ctx2Client.getAllVersions(subject2));
    assertEquals("Getting all versions from subject3 should match all registered versions", versions3, defaultCtxClient.getAllVersions(subject3));
    assertEquals("Getting all versions from subject3 should match all registered versions", versions3, bareClient.getAllVersions(subject3));
    assertEquals("Getting all versions from subject3 should match all registered versions", versions3, bareClient.getAllVersions(":.:" + subject3));
    // test getAllContexts
    assertEquals("Getting all contexts should return all registered contexts", ImmutableList.of(DEFAULT_CONTEXT, ".ctx1", ".ctx2"), ctx1Client.getAllContexts());
    // test getAllSubjects with existing data
    assertEquals("Getting all subjects should match all registered subjects", Collections.singletonList(":.ctx1:" + subject1), ctx1Client.getAllSubjects());
    // test getAllSubjects with existing data
    assertEquals("Getting all subjects should match all registered subjects", Collections.singletonList(":.ctx2:" + subject2), ctx2Client.getAllSubjects());
    // test getAllSubjects with existing data
    assertEquals("Getting all subjects should match all registered subjects", Collections.singletonList(subject3), defaultCtxClient.getAllSubjects());
    // test getAllSubjects with existing data
    assertEquals("Getting all subjects should match all registered subjects", Collections.singletonList(subject3), bareClient.getAllSubjects("", false));
}

/**
 * Registers every schema under {@code subject} via {@code client}, verifying the
 * assigned ids are 1..n, and returns the list of expected versions (1..n).
 */
private List<Integer> registerSchemaBatch(RestService client, List<String> schemas, String subject) throws Exception {
    List<Integer> versions = new ArrayList<>();
    for (int i = 0; i < schemas.size(); i++) {
        registerAndVerifySchema(client, schemas.get(i), i + 1, subject);
        versions.add(i + 1);
    }
    return versions;
}
Also used : ArrayList(java.util.ArrayList) RestClientException(io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException) RestService(io.confluent.kafka.schemaregistry.client.rest.RestService) Test(org.junit.Test)

Example 8 with RestService

Use of io.confluent.kafka.schemaregistry.client.rest.RestService in the project ksql by confluentinc.

From the class KsqlSchemaRegistryClientFactory, method create.

/**
 * Builds a schema-registry client backed by the supplier-provided REST service,
 * wiring in an SSL socket factory when an SSL context has been configured.
 */
public SchemaRegistryClient create() {
    final RestService service = serviceSupplier.get();
    final SSLContext ssl = sslFactory.sslContext();
    if (ssl != null) {
        service.setSslSocketFactory(ssl.getSocketFactory());
    }
    // 1000 is the client-side schema cache capacity.
    return new CachedSchemaRegistryClient(service, 1000);
}
Also used : SSLContext(javax.net.ssl.SSLContext) RestService(io.confluent.kafka.schemaregistry.client.rest.RestService) CachedSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient)

Example 9 with RestService

Use of io.confluent.kafka.schemaregistry.client.rest.RestService in the project apicurio-registry-examples by Apicurio.

From the class ConfluentSerdesExample, method createKafkaProducer.

/**
 * Creates the Kafka producer.
 */
/**
 * Creates the Kafka producer, wiring its Avro value serializer to the
 * Confluent-compatible registry endpoint ({@code CCOMPAT_API_URL}).
 */
private static Producer<String, Object> createKafkaProducer() {
    // Kafka connection settings.
    Properties kafkaProps = new Properties();
    kafkaProps.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS);
    kafkaProps.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME);
    kafkaProps.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all");
    kafkaProps.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    configureSecurityIfPresent(kafkaProps);
    // Registry client, with basic auth when enabled via the CONFIGURE_AUTH env var.
    final Map<String, String> registryClientProps = new HashMap<>();
    if (Boolean.parseBoolean(System.getenv("CONFIGURE_AUTH"))) {
        registryClientProps.put("basic.auth.credentials.source", "USER_INFO");
        registryClientProps.put("schema.registry.basic.auth.user.info", String.format("%s:%s", System.getenv(SerdeConfig.AUTH_CLIENT_ID), System.getenv(SerdeConfig.AUTH_CLIENT_SECRET)));
    }
    CachedSchemaRegistryClient registryClient =
        new CachedSchemaRegistryClient(new RestService(CCOMPAT_API_URL), 100, registryClientProps);
    // Serializer configuration for the Confluent-provided Avro serializer.
    Map<String, String> serializerProps = new HashMap<>();
    serializerProps.put("schema.registry.url", CCOMPAT_API_URL);
    serializerProps.put("auto.register.schemas", "true");
    // Map the topic name to the artifactId in the registry
    serializerProps.put("value.subject.name.strategy", "io.confluent.kafka.serializers.subject.TopicRecordNameStrategy");
    KafkaAvroSerializer valueSerializer = new KafkaAvroSerializer(registryClient, serializerProps);
    // Create the Kafka producer
    return new KafkaProducer<String, Object>(kafkaProps, new StringSerializer(), valueSerializer);
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) HashMap(java.util.HashMap) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) Properties(java.util.Properties) RestService(io.confluent.kafka.schemaregistry.client.rest.RestService) CachedSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient) StringSerializer(org.apache.kafka.common.serialization.StringSerializer)

Example 10 with RestService

Use of io.confluent.kafka.schemaregistry.client.rest.RestService in the project schema-registry by confluentinc.

From the class KafkaSchemaRegistry, method setLeader.

/**
 * 'Inform' this SchemaRegistry instance which SchemaRegistry is the current leader.
 * If this instance is set as the new leader, ensure it is up-to-date with data in
 * the kafka store.
 *
 * @param newLeader Identity of the current leader. null means no leader is alive.
 * @throws IllegalStateException if {@code newLeader} is not leader-eligible
 */
@Override
public void setLeader(@Nullable SchemaRegistryIdentity newLeader) throws SchemaRegistryTimeoutException, SchemaRegistryStoreException, IdGenerationException {
    log.debug("Setting the leader to " + newLeader);
    // Only schema registry instances eligible for leader can be set to leader
    if (newLeader != null && !newLeader.getLeaderEligibility()) {
        throw new IllegalStateException("Tried to set an ineligible node to leader: " + newLeader);
    }
    // All leader-identity transitions happen under the store's leader lock.
    kafkaStore.leaderLock().lock();
    try {
        SchemaRegistryIdentity previousLeader = leaderIdentity;
        leaderIdentity = newLeader;
        if (leaderIdentity == null) {
            // No live leader: drop the client used to talk to the leader.
            leaderRestService = null;
        } else {
            // Build a REST client pointing at the new leader, with TLS when configured.
            leaderRestService = new RestService(leaderIdentity.getUrl());
            if (sslFactory != null && sslFactory.sslContext() != null) {
                leaderRestService.setSslSocketFactory(sslFactory.sslContext().getSocketFactory());
                leaderRestService.setHostnameVerifier(getHostnameVerifier());
            }
        }
        // Only when THIS node newly became leader does it need to catch up.
        if (leaderIdentity != null && !leaderIdentity.equals(previousLeader) && isLeader()) {
            // The new leader may not know the exact last offset in the Kafka log. So, mark the
            // last offset invalid here
            kafkaStore.markLastWrittenOffsetInvalid();
            // Wait until the Kafka store reader has caught up before acting as
            // leader.
            try {
                kafkaStore.waitUntilKafkaReaderReachesLastOffset(initTimeout);
            } catch (StoreException e) {
                throw new SchemaRegistryStoreException("Exception getting latest offset ", e);
            }
            // (Re)initialize id generation for this node's leadership term.
            idGenerator.init();
        }
        // Publish leadership state to metrics: 1 = leader, 0 = not leader.
        metricsContainer.getLeaderNode().set(isLeader() ? 1 : 0);
    } finally {
        kafkaStore.leaderLock().unlock();
    }
}
Also used : SchemaRegistryStoreException(io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException) RestService(io.confluent.kafka.schemaregistry.client.rest.RestService) SchemaRegistryStoreException(io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException) StoreException(io.confluent.kafka.schemaregistry.storage.exceptions.StoreException)

Aggregations

RestService (io.confluent.kafka.schemaregistry.client.rest.RestService)14 CachedSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient)7 Test (org.junit.Test)4 HashMap (java.util.HashMap)3 AvroSchemaProvider (io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider)2 SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient)2 SchemaString (io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString)2 RestClientException (io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException)2 BasicAuthCredentialProvider (io.confluent.kafka.schemaregistry.client.security.basicauth.BasicAuthCredentialProvider)2 JsonSchemaProvider (io.confluent.kafka.schemaregistry.json.JsonSchemaProvider)2 ProtobufSchemaProvider (io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider)2 Collectors (java.util.stream.Collectors)2 KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer)2 ImmutableMap (com.google.common.collect.ImmutableMap)1 FakeTicker (com.google.common.testing.FakeTicker)1 ParsedSchema (io.confluent.kafka.schemaregistry.ParsedSchema)1 SchemaProvider (io.confluent.kafka.schemaregistry.SchemaProvider)1 AvroSchema (io.confluent.kafka.schemaregistry.avro.AvroSchema)1 Mode (io.confluent.kafka.schemaregistry.client.rest.entities.Mode)1 Schema (io.confluent.kafka.schemaregistry.client.rest.entities.Schema)1