Use of jakarta.json.JsonPatch in project zilla by aklivity.
The class KafkaCachePartition, method writeEntryFinish:
public void writeEntryFinish(ArrayFW<KafkaHeaderFW> headers, KafkaDeltaType deltaType) {
    final Node head = sentinel.previous;
    assert head != sentinel;

    final KafkaCacheSegment headSegment = head.segment;
    assert headSegment != null;

    final KafkaCacheFile logFile = headSegment.logFile();
    final KafkaCacheFile deltaFile = headSegment.deltaFile();
    final KafkaCacheFile hashFile = headSegment.hashFile();
    final KafkaCacheFile indexFile = headSegment.indexFile();

    final int logAvailable = logFile.available();
    final int logRequired = headers.sizeof();
    assert logAvailable >= logRequired : String.format("%s %d >= %d", headSegment, logAvailable, logRequired);
    logFile.appendBytes(headers);

    final long offsetDelta = (int) (progress - headSegment.baseOffset());
    final long indexEntry = (offsetDelta << 32) | logFile.markValue();

    if (!headers.isEmpty()) {
        final DirectBuffer buffer = headers.buffer();
        final ByteBuffer byteBuffer = buffer.byteBuffer();
        assert byteBuffer != null;
        byteBuffer.clear();
        headers.forEach(h -> {
            final long hash = computeHash(h);
            final long hashEntry = (hash << 32) | logFile.markValue();
            hashFile.appendLong(hashEntry);
        });
    }

    assert indexFile.available() >= Long.BYTES;
    indexFile.appendLong(indexEntry);

    final KafkaCacheEntryFW headEntry = logFile.readBytes(logFile.markValue(), headEntryRO::wrap);

    if (deltaType == JSON_PATCH && ancestorEntry != null && ancestorEntry.valueLen() != -1 && headEntry.valueLen() != -1) {
        final OctetsFW ancestorValue = ancestorEntry.value();
        final OctetsFW headValue = headEntry.value();
        assert headEntry.offset$() == progress;

        // read the ancestor and head values as JSON, then compute a JSON Patch diff between them
        final JsonProvider json = JsonProvider.provider();
        ancestorIn.wrap(ancestorValue.buffer(), ancestorValue.offset(), ancestorValue.sizeof());
        final JsonReader ancestorReader = json.createReader(ancestorIn);
        final JsonStructure ancestorJson = ancestorReader.read();
        ancestorReader.close();

        headIn.wrap(headValue.buffer(), headValue.offset(), headValue.sizeof());
        final JsonReader headReader = json.createReader(headIn);
        final JsonStructure headJson = headReader.read();
        headReader.close();

        final JsonPatch diff = json.createDiff(ancestorJson, headJson);
        final JsonArray diffJson = diff.toJsonArray();

        // append the serialized diff, prefixed by its length, to the delta file
        diffOut.wrap(diffBuffer, Integer.BYTES);
        final JsonWriter writer = json.createWriter(diffOut);
        writer.write(diffJson);
        writer.close();

        // TODO: signal delta.sizeof > head.sizeof via null delta, otherwise delta file can exceed log file
        final int deltaLength = diffOut.position();
        diffBuffer.putInt(0, deltaLength);
        deltaFile.appendBytes(diffBuffer, 0, Integer.BYTES + deltaLength);
    }

    headSegment.lastOffset(progress);
}
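In isolation, the jakarta.json calls above reduce to a small pattern: read two JSON values, diff them with createDiff, and serialize the resulting JsonPatch as a plain JsonArray that can later be re-applied. A minimal standalone sketch of that pattern; the sample documents are invented for illustration and are not zilla cache entries:

import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonObject;
import jakarta.json.JsonPatch;

public class JsonDiffSketch {
    public static void main(String[] args) {
        // hypothetical "ancestor" and "head" values standing in for two versions of a record
        JsonObject ancestor = Json.createObjectBuilder()
            .add("status", "created")
            .add("qty", 1)
            .build();
        JsonObject head = Json.createObjectBuilder()
            .add("status", "shipped")
            .add("qty", 1)
            .add("carrier", "UPS")
            .build();

        // createDiff produces an RFC 6902 patch; toJsonArray() yields the form that gets serialized
        JsonPatch diff = Json.createDiff(ancestor, head);
        JsonArray operations = diff.toJsonArray();
        System.out.println(operations);
        // e.g. [{"op":"replace","path":"/status","value":"shipped"},{"op":"add","path":"/carrier","value":"UPS"}]

        // re-applying the patch to the ancestor reproduces the head value
        JsonObject reconstructed = Json.createPatch(operations).apply(ancestor);
        System.out.println(reconstructed.equals(head)); // true
    }
}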
Use of jakarta.json.JsonPatch in project zilla by aklivity.
The class ConfigureTask, method call:
@Override
public Void call() throws Exception {
    String configText;

    if (configURL == null) {
        configText = "{\"name\":\"default\"}";
    } else if ("http".equals(configURL.getProtocol()) || "https".equals(configURL.getProtocol())) {
        HttpClient client = HttpClient.newBuilder().version(HTTP_2).followRedirects(NORMAL).build();
        HttpRequest request = HttpRequest.newBuilder().GET().uri(configURL.toURI()).build();
        HttpResponse<String> response = client.send(request, BodyHandlers.ofString());
        String body = response.body();
        configText = body;
    } else {
        URLConnection connection = configURL.openConnection();
        try (InputStream input = connection.getInputStream()) {
            configText = new String(input.readAllBytes(), UTF_8);
        }
    }

    if (config.configSyntaxMustache()) {
        configText = Mustache.resolve(configText, System::getenv);
    }

    logger.accept(configText);

    List<String> errors = new LinkedList<>();

    parse:
    try {
        InputStream schemaInput = Engine.class.getResourceAsStream("internal/schema/engine.schema.json");

        JsonProvider schemaProvider = JsonProvider.provider();
        JsonReader schemaReader = schemaProvider.createReader(schemaInput);
        JsonObject schemaObject = schemaReader.readObject();

        // apply each contributed schema patch (a JSON Patch array) to the base engine schema
        for (URL schemaType : schemaTypes) {
            InputStream schemaPatchInput = schemaType.openStream();
            JsonReader schemaPatchReader = schemaProvider.createReader(schemaPatchInput);
            JsonArray schemaPatchArray = schemaPatchReader.readArray();
            JsonPatch schemaPatch = schemaProvider.createPatch(schemaPatchArray);
            schemaObject = schemaPatch.apply(schemaObject);
        }

        JsonParser schemaParser = schemaProvider.createParserFactory(null).createParser(new StringReader(schemaObject.toString()));
        JsonValidationService service = JsonValidationService.newInstance();
        ProblemHandler handler = service.createProblemPrinter(errors::add);
        JsonSchemaReader reader = service.createSchemaReader(schemaParser);
        JsonSchema schema = new UniquePropertyKeysSchema(reader.read());
        JsonProvider provider = service.createJsonProvider(schema, parser -> handler);

        // validate the configuration text against the patched schema
        provider.createReader(new StringReader(configText)).read();
        if (!errors.isEmpty()) {
            break parse;
        }

        JsonbConfig config = new JsonbConfig().withAdapters(new NamespaceAdapter());
        Jsonb jsonb = JsonbBuilder.newBuilder().withProvider(provider).withConfig(config).build();
        NamespaceConfig namespace = jsonb.fromJson(configText, NamespaceConfig.class);
        if (!errors.isEmpty()) {
            break parse;
        }

        namespace.id = supplyId.applyAsInt(namespace.name);

        for (BindingConfig binding : namespace.bindings) {
            binding.id = NamespacedId.id(namespace.id, supplyId.applyAsInt(binding.entry));

            if (binding.vault != null) {
                binding.vault.id = NamespacedId.id(
                    supplyId.applyAsInt(ofNullable(binding.vault.namespace).orElse(namespace.name)),
                    supplyId.applyAsInt(binding.vault.name));
            }

            // TODO: consider route exit namespace
            for (RouteConfig route : binding.routes) {
                route.id = NamespacedId.id(namespace.id, supplyId.applyAsInt(route.exit));
            }

            // TODO: consider binding exit namespace
            if (binding.exit != null) {
                binding.exit.id = NamespacedId.id(namespace.id, supplyId.applyAsInt(binding.exit.exit));
            }

            tuning.affinity(binding.id, tuning.affinity(binding.id));
        }

        for (VaultConfig vault : namespace.vaults) {
            vault.id = NamespacedId.id(namespace.id, supplyId.applyAsInt(vault.name));
        }

        CompletableFuture<Void> future = CompletableFuture.completedFuture(null);
        for (DispatchAgent dispatcher : dispatchers) {
            future = CompletableFuture.allOf(future, dispatcher.attach(namespace));
        }
        future.join();

        extensions.forEach(e -> e.onConfigured(context));
    } catch (Throwable ex) {
        errorHandler.onError(ex);
    }

    if (!errors.isEmpty()) {
        errors.forEach(msg -> errorHandler.onError(new JsonException(msg)));
    }

    return null;
}
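The schema-patching loop above is plain JsonPatch applied to a JSON Schema document: each contributed patch array is applied to the base schema before the configuration is validated. A minimal sketch of that step on its own, using an invented toy schema and patch rather than zilla's actual engine schema:

import java.io.StringReader;
import jakarta.json.JsonArray;
import jakarta.json.JsonObject;
import jakarta.json.JsonPatch;
import jakarta.json.JsonReader;
import jakarta.json.spi.JsonProvider;

public class SchemaPatchSketch {
    public static void main(String[] args) {
        JsonProvider provider = JsonProvider.provider();

        // a toy "core" schema with an empty properties object
        String baseSchema = "{\"type\":\"object\",\"properties\":{}}";
        // a patch a plugin might contribute, adding one property definition
        String patchText = "[{\"op\":\"add\",\"path\":\"/properties/telemetry\",\"value\":{\"type\":\"object\"}}]";

        JsonReader schemaReader = provider.createReader(new StringReader(baseSchema));
        JsonObject schemaObject = schemaReader.readObject();
        schemaReader.close();

        JsonReader patchReader = provider.createReader(new StringReader(patchText));
        JsonArray patchArray = patchReader.readArray();
        patchReader.close();

        // same calls as above: createPatch from the array, then apply to the schema document
        JsonPatch schemaPatch = provider.createPatch(patchArray);
        schemaObject = schemaPatch.apply(schemaObject);

        System.out.println(schemaObject);
        // {"type":"object","properties":{"telemetry":{"type":"object"}}}
    }
}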
Use of jakarta.json.JsonPatch in project zilla by aklivity.
The class ConfigSchemaRule, method apply:
@Override
public Statement apply(Statement base, Description description) {
    Objects.requireNonNull(schemaName, "schema");
    schemaPatchNames.forEach(n -> Objects.requireNonNull(n, "schemaPatch"));

    Function<String, InputStream> findResource = description.getTestClass().getClassLoader()::getResourceAsStream;

    InputStream schemaInput = findResource.apply(schemaName);
    JsonProvider schemaProvider = JsonProvider.provider();
    JsonReader schemaReader = schemaProvider.createReader(schemaInput);
    JsonObject schemaObject = schemaReader.readObject();

    for (String schemaPatchName : schemaPatchNames) {
        InputStream schemaPatchInput = findResource.apply(schemaPatchName);
        Objects.requireNonNull(schemaPatchInput, "schemaPatch");
        JsonReader schemaPatchReader = schemaProvider.createReader(schemaPatchInput);
        JsonArray schemaPatchArray = schemaPatchReader.readArray();
        JsonPatch schemaPatch = schemaProvider.createPatch(schemaPatchArray);
        schemaObject = schemaPatch.apply(schemaObject);
    }

    JsonParser schemaParser = schemaProvider.createParserFactory(null).createParser(new StringReader(schemaObject.toString()));
    JsonValidationService service = JsonValidationService.newInstance();
    ProblemHandler handler = service.createProblemPrinter(msg -> rethrowUnchecked(new JsonException(msg)));
    JsonSchemaReader reader = service.createSchemaReader(schemaParser);
    JsonSchema schema = reader.read();
    provider = service.createJsonProvider(schema, parser -> handler);

    if (configurationRoot != null) {
        String configFormat = String.format("%s/%%s", configurationRoot);
        findConfig = configName -> findResource.apply(String.format(configFormat, configName));
    } else {
        Class<?> testClass = description.getTestClass();
        String configFormat = String.format("%s-%%s", testClass.getSimpleName());
        findConfig = configName -> testClass.getResourceAsStream(String.format(configFormat, configName));
    }

    return base;
}
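The rule reads its schema patches from classpath resources, but the same kind of patch can also be constructed programmatically with Json.createPatchBuilder, which may be handier for a one-off test. A brief sketch of that alternative, not taken from the rule above:

import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonPatch;

public class PatchBuilderSketch {
    public static void main(String[] args) {
        // a toy schema document standing in for the one the rule loads
        JsonObject schemaObject = Json.createObjectBuilder()
            .add("type", "object")
            .add("properties", Json.createObjectBuilder())
            .build();

        // build the same kind of add-a-property patch in code instead of reading a resource
        JsonPatch schemaPatch = Json.createPatchBuilder()
            .add("/properties/binding", Json.createObjectBuilder().add("type", "string").build())
            .build();

        JsonObject patched = schemaPatch.apply(schemaObject);
        System.out.println(patched);
        // {"type":"object","properties":{"binding":{"type":"string"}}}
    }
}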
Use of jakarta.json.JsonPatch in project resteasy by resteasy.
The class JsonpPatchMethodFilter, method applyPatch:
@Override
protected byte[] applyPatch(final ContainerRequestContext requestContext, final byte[] targetJsonBytes) throws IOException, ProcessingException {
    HttpRequest request = ResteasyContext.getContextData(HttpRequest.class);
    // TODO: look at if we need to get reader factory from ContextResolver
    Charset charset = AbstractJsonpProvider.getCharset(requestContext.getMediaType());
    ByteArrayInputStream is = new ByteArrayInputStream(targetJsonBytes);
    if (charset == null) {
        charset = Charset.defaultCharset();
    }
    JsonReader reader = readerFactory.createReader(is, charset);
    JsonObject targetJson = reader.readObject();
    JsonObject result = null;
    try {
        if (MediaType.APPLICATION_JSON_PATCH_JSON_TYPE.isCompatible(requestContext.getMediaType())) {
            // RFC 6902 JSON Patch: the request body is an array of patch operations
            JsonReader arrayReader = readerFactory.createReader(request.getInputStream(), charset);
            JsonArray jsonArray = arrayReader.readArray();
            JsonPatch patch = Json.createPatch(jsonArray);
            result = patch.apply(targetJson);
        } else {
            // RFC 7386 JSON Merge Patch: the request body is the partial document to merge
            JsonReader valueReader = readerFactory.createReader(request.getInputStream(), charset);
            JsonValue mergePatchValue = valueReader.readValue();
            final JsonMergePatch mergePatch = Json.createMergePatch(mergePatchValue);
            result = mergePatch.apply(targetJson).asJsonObject();
        }
    } catch (JsonException e) {
        // TODO: talk with jsonp community fix this
        if (e.getMessage().contains("Illegal value") || e.getMessage().contains("JSON Patch must")) {
            throw new BadRequestException(e.getMessage());
        }
        throw new Failure(e, HttpResponseCodes.SC_CONFLICT);
    }
    ByteArrayOutputStream targetOutputStream = new ByteArrayOutputStream();
    JsonWriter jsonWriter = writerFactory.createWriter(targetOutputStream, charset);
    jsonWriter.write(result);
    return targetOutputStream.toByteArray();
}
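The two branches above differ only in the patch format: an RFC 6902 JSON Patch is an array of operations addressed by JSON Pointer, while an RFC 7386 JSON Merge Patch is a partial document in which null removes a member. A minimal comparison independent of RESTEasy, with invented sample documents:

import java.io.StringReader;
import jakarta.json.Json;
import jakarta.json.JsonMergePatch;
import jakarta.json.JsonObject;
import jakarta.json.JsonPatch;
import jakarta.json.JsonValue;

public class PatchVsMergePatchSketch {
    public static void main(String[] args) {
        JsonObject target = Json.createReader(
            new StringReader("{\"name\":\"book\",\"price\":10,\"tags\":[\"new\"]}")).readObject();

        // RFC 6902 JSON Patch: explicit operations against JSON Pointer paths
        JsonPatch patch = Json.createPatch(Json.createReader(
            new StringReader("[{\"op\":\"replace\",\"path\":\"/price\",\"value\":12},"
                + "{\"op\":\"remove\",\"path\":\"/tags\"}]")).readArray());
        System.out.println(patch.apply(target)); // price replaced with 12, tags removed

        // RFC 7386 JSON Merge Patch: a partial document; a null value deletes that member
        JsonValue mergeDoc = Json.createReader(
            new StringReader("{\"price\":12,\"tags\":null}")).readValue();
        JsonMergePatch mergePatch = Json.createMergePatch(mergeDoc);
        System.out.println(mergePatch.apply(target).asJsonObject()); // same net result via merge semantics
    }
}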