use of ca.uhn.hl7v2.HapiContext in project camel by apache.
the class HL7ValidateTest method createRouteBuilder.
protected RouteBuilder createRouteBuilder() throws Exception {
    HapiContext hapiContext = new DefaultHapiContext();
    hapiContext.setValidationContext(new NoValidation());
    Parser p = new GenericParser(hapiContext);
    hl7 = new HL7DataFormat();
    hl7.setParser(p);
    /*
     * Let's start by adding a validation rule to the default validation
     * that disallows PID-2 from being empty.
     */
    ValidationRuleBuilder builder = new ValidationRuleBuilder() {

        private static final long serialVersionUID = 1L;

        @Override
        protected void configure() {
            forVersion(Version.V24).message("ADT", "*").terser("PID-2", not(empty()));
        }
    };
    ValidationContext customValidationContext = ValidationContextFactory.fromBuilder(builder);
    HapiContext customContext = new DefaultHapiContext(customValidationContext);
    final Parser customParser = new GenericParser(customContext);
    return new RouteBuilder() {

        public void configure() throws Exception {
            from("direct:unmarshalFailed").unmarshal().hl7().to("mock:unmarshal");
            from("direct:unmarshalOk").unmarshal().hl7(false).to("mock:unmarshal");
            from("direct:unmarshalOkCustom").unmarshal(hl7).to("mock:unmarshal");
            from("direct:start1").marshal().hl7(customParser).to("mock:end");
            from("direct:start2").marshal().hl7(true).to("mock:end");
        }
    };
}
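For illustration, a minimal standalone sketch (not part of the Camel test) of how the same rule builder rejects a message whose PID-2 is empty; adtWithEmptyPid2 is a placeholder for a v2.4 ADT message string, and exception handling is reduced to the bare minimum.

HapiContext ruleContext = new DefaultHapiContext(ValidationContextFactory.fromBuilder(builder));
Parser validatingParser = new GenericParser(ruleContext);
try {
    // placeholder: an ADT^A01 v2.4 message whose PID-2 field is empty
    validatingParser.parse(adtWithEmptyPid2);
} catch (HL7Exception e) {
    // the terser rule "PID-2 not empty" fires during parsing, so parse() throws
}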
use of ca.uhn.hl7v2.HapiContext in project nifi by apache.
the class TestHL7Query method createMessage.
private HL7Message createMessage(final String msgText) throws HL7Exception, IOException {
    final HapiContext hapiContext = new DefaultHapiContext();
    hapiContext.setValidationContext(ValidationContextFactory.noValidation());
    final PipeParser parser = hapiContext.getPipeParser();
    final Message message = parser.parse(msgText);
    return new HapiMessage(message);
}
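A hypothetical caller for the helper above; the two-segment ADT text is invented for illustration and is not taken from the NiFi test.

// build a minimal MSH + PID message and wrap it via the helper
final String msgText =
        "MSH|^~\\&|SND_APP|SND_FAC|RCV_APP|RCV_FAC|20180101120000||ADT^A01|1234|P|2.3\r" +
        "PID|||7777^^^^MR||DOE^JOHN";
final HL7Message message = createMessage(msgText);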
use of ca.uhn.hl7v2.HapiContext in project nifi by apache.
the class RouteHL7 method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final Charset charset = Charset.forName(context.getProperty(CHARACTER_SET).evaluateAttributeExpressions(flowFile).getValue());
    final byte[] buffer = new byte[(int) flowFile.getSize()];
    session.read(flowFile, new InputStreamCallback() {

        @Override
        public void process(final InputStream in) throws IOException {
            StreamUtils.fillBuffer(in, buffer);
        }
    });
    @SuppressWarnings("resource")
    final HapiContext hapiContext = new DefaultHapiContext();
    hapiContext.setValidationContext((ca.uhn.hl7v2.validation.ValidationContext) ValidationContextFactory.noValidation());
    final PipeParser parser = hapiContext.getPipeParser();
    final String hl7Text = new String(buffer, charset);
    final HL7Message message;
    try {
        final Message hapiMessage = parser.parse(hl7Text);
        message = new HapiMessage(hapiMessage);
    } catch (final Exception e) {
        getLogger().error("Failed to parse {} as HL7 due to {}; routing to failure", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final Set<String> matchingRels = new HashSet<>();
    final Map<Relationship, HL7Query> queryMap = queries;
    for (final Map.Entry<Relationship, HL7Query> entry : queryMap.entrySet()) {
        final Relationship relationship = entry.getKey();
        final HL7Query query = entry.getValue();
        final QueryResult result = query.evaluate(message);
        if (result.isMatch()) {
            FlowFile clone = session.clone(flowFile);
            clone = session.putAttribute(clone, "RouteHL7.Route", relationship.getName());
            session.transfer(clone, relationship);
            session.getProvenanceReporter().route(clone, relationship);
            matchingRels.add(relationship.getName());
        }
    }
    session.transfer(flowFile, REL_ORIGINAL);
    getLogger().info("Routed a copy of {} to {} relationships: {}", new Object[] { flowFile, matchingRels.size(), matchingRels });
}
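The routing decision itself reduces to the compile-then-evaluate pattern below. This is a hedged sketch: HL7Query.compile and the SELECT/WHERE text are assumptions about NiFi's HL7 query language, shown only to put the evaluate()/isMatch() calls from onTrigger in context, and hl7Text is a placeholder.

HapiContext ctx = new DefaultHapiContext();
ctx.setValidationContext((ca.uhn.hl7v2.validation.ValidationContext) ValidationContextFactory.noValidation());
HL7Message msg = new HapiMessage(ctx.getPipeParser().parse(hl7Text));
// illustrative query text: match only messages whose type (MSH-9-1) is ADT
HL7Query query = HL7Query.compile("SELECT MESSAGE WHERE MSH.9.1 = 'ADT'");
boolean routed = query.evaluate(msg).isMatch();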
use of ca.uhn.hl7v2.HapiContext in project streamsx.health by IBMStreams.
the class TestServer method main.
public static void main(String[] args) {
    try {
        HapiContext ctx = new DefaultHapiContext();
        CanonicalModelClassFactory mcf = new CanonicalModelClassFactory("2.6");
        ctx.setModelClassFactory(mcf);
        ctx.setValidationRuleBuilder(new NoValidationBuilder());
        ctx.setExecutorService(Executors.newCachedThreadPool());
        HL7Service server = ctx.newServer(8082, false);
        server.registerApplication(new HapiMessageHandler(null));
        server.registerConnectionListener(new ConnectionListener() {

            @Override
            public void connectionReceived(Connection c) {
                System.out.println("Connection received.");
            }

            @Override
            public void connectionDiscarded(Connection c) {
                System.out.println("Connection discarded.");
            }
        });
        server.startAndWait();
        ctx.close();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (InterruptedException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
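A hedged companion sketch of a HAPI client exercising the server above; host and port mirror the newServer(8082, false) call, adtText is a placeholder for a v2.6 message string, and checked-exception handling is omitted.

HapiContext clientCtx = new DefaultHapiContext();
clientCtx.setValidationRuleBuilder(new NoValidationBuilder());
Connection conn = clientCtx.newClient("localhost", 8082, false);
// send one message over MLLP and print the encoded acknowledgement
Message response = conn.getInitiator().sendAndReceive(clientCtx.getPipeParser().parse(adtText));
System.out.println(clientCtx.getPipeParser().encode(response));
conn.close();
clientCtx.close();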
use of ca.uhn.hl7v2.HapiContext in project nifi by apache.
the class ExtractHL7Attributes method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final Charset charset = Charset.forName(context.getProperty(CHARACTER_SET).evaluateAttributeExpressions(flowFile).getValue());
    final Boolean useSegmentNames = context.getProperty(USE_SEGMENT_NAMES).asBoolean();
    final Boolean parseSegmentFields = context.getProperty(PARSE_SEGMENT_FIELDS).asBoolean();
    final Boolean skipValidation = context.getProperty(SKIP_VALIDATION).asBoolean();
    final String inputVersion = context.getProperty(HL7_INPUT_VERSION).getValue();
    final byte[] buffer = new byte[(int) flowFile.getSize()];
    session.read(flowFile, new InputStreamCallback() {

        @Override
        public void process(final InputStream in) throws IOException {
            StreamUtils.fillBuffer(in, buffer);
        }
    });
    @SuppressWarnings("resource")
    final HapiContext hapiContext = new DefaultHapiContext();
    if (!inputVersion.equals("autodetect")) {
        hapiContext.setModelClassFactory(new CanonicalModelClassFactory(inputVersion));
    }
    if (skipValidation) {
        hapiContext.setValidationContext((ValidationContext) ValidationContextFactory.noValidation());
    }
    final PipeParser parser = hapiContext.getPipeParser();
    final String hl7Text = new String(buffer, charset);
    try {
        final Message message = parser.parse(hl7Text);
        final Map<String, String> attributes = getAttributes(message, useSegmentNames, parseSegmentFields);
        flowFile = session.putAllAttributes(flowFile, attributes);
        getLogger().debug("Added the following attributes for {}: {}", new Object[] { flowFile, attributes });
    } catch (final HL7Exception e) {
        getLogger().error("Failed to extract attributes from {} due to {}", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    session.transfer(flowFile, REL_SUCCESS);
}
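For comparison, a hedged sketch of per-field access with HAPI's Terser on a message parsed the same way; this is not necessarily what getAttributes does internally, hl7Text is a placeholder, and the Terser paths are examples that assume MSH and PID sit at the message root.

HapiContext ctx = new DefaultHapiContext();
ctx.setValidationContext(ValidationContextFactory.noValidation());
Message parsed = ctx.getPipeParser().parse(hl7Text);
Terser terser = new Terser(parsed);
// read the message type (MSH-9-1) and the first patient identifier (PID-3-1)
String messageType = terser.get("/MSH-9-1");
String patientId = terser.get("/PID-3-1");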