Use of org.apache.thrift.TSerializer in project vcell by virtualcell.
The class CopasiOptimizationSolver, method solveRemoteApi.
public static OptimizationResultSet solveRemoteApi(ParameterEstimationTaskSimulatorIDA parestSimulator, ParameterEstimationTask parameterEstimationTask, CopasiOptSolverCallbacks optSolverCallbacks, MathMappingCallback mathMappingCallback) throws IOException, ExpressionException, OptimizationException {
    try {
        OptProblem optProblem = CopasiServicePython.makeOptProblem(parameterEstimationTask);
        boolean bIgnoreCertProblems = true;
        boolean bIgnoreHostMismatch = true;
        // e.g. vcell.serverhost=vcellapi.cam.uchc.edu:8080
        String serverHost = PropertyLoader.getRequiredProperty(PropertyLoader.vcellServerHost);
        String[] parts = serverHost.split(":");
        String host = parts[0];
        int port = Integer.parseInt(parts[1]);
        VCellApiClient apiClient = new VCellApiClient(host, port, bIgnoreCertProblems, bIgnoreHostMismatch);
        TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
        String optProblemJson = serializer.toString(optProblem);
        String optimizationId = apiClient.submitOptimization(optProblemJson);
        // 20 second timeout
        final long TIMEOUT_MS = 1000 * 20;
        long startTime = System.currentTimeMillis();
        OptRun optRun = null;
        while ((System.currentTimeMillis() - startTime) < TIMEOUT_MS) {
            if (optSolverCallbacks.getStopRequested()) {
                throw new RuntimeException(STOP_REQUESTED);
            }
            String optRunJson = apiClient.getOptRunJson(optimizationId);
            TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory());
            optRun = new OptRun();
            deserializer.deserialize(optRun, optRunJson.getBytes());
            OptRunStatus status = optRun.status;
            if (status == OptRunStatus.Complete) {
                System.out.println("job " + optimizationId + ": status " + status + " " + optRun.getOptResultSet().toString());
                break;
            }
            if (status == OptRunStatus.Failed) {
                throw new RuntimeException("optimization failed, message=" + optRun.statusMessage);
            }
            System.out.println("job " + optimizationId + ": status " + status);
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
            }
        }
        System.out.println("done with optimization");
        OptResultSet optResultSet = optRun.getOptResultSet();
        int numFittedParameters = optResultSet.getOptParameterValues().size();
        String[] paramNames = new String[numFittedParameters];
        double[] paramValues = new double[numFittedParameters];
        for (int pIndex = 0; pIndex < numFittedParameters; pIndex++) {
            OptParameterValue optParamValue = optResultSet.getOptParameterValues().get(pIndex);
            paramNames[pIndex] = optParamValue.parameterName;
            paramValues[pIndex] = optParamValue.bestValue;
        }
        OptimizationStatus status = new OptimizationStatus(OptimizationStatus.NORMAL_TERMINATION, optRun.statusMessage);
        OptRunResultSet optRunResultSet = new OptRunResultSet(paramValues, optResultSet.objectiveFunction, optResultSet.numFunctionEvaluations, status);
        OptSolverResultSet copasiOptSolverResultSet = new OptSolverResultSet(paramNames, optRunResultSet);
        RowColumnResultSet copasiRcResultSet = parestSimulator.getRowColumnRestultSetByBestEstimations(parameterEstimationTask, paramNames, paramValues);
        OptimizationResultSet copasiOptimizationResultSet = new OptimizationResultSet(copasiOptSolverResultSet, copasiRcResultSet);
        System.out.println("-----------SOLUTION FROM VCellAPI---------------\n" + optResultSet.toString());
        return copasiOptimizationResultSet;
    } catch (Exception e) {
        e.printStackTrace(System.out);
        if (e.getMessage() != null && e.getMessage().equals(STOP_REQUESTED)) {
            throw UserCancelException.CANCEL_GENERIC;
        }
        throw new OptimizationException(e.getCause() != null ? e.getCause().getMessage() : e.getMessage());
    }
}
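For context, the JSON round trip that solveRemoteApi relies on (serialize the OptProblem with TJSONProtocol before submission, later deserialize the OptRun returned by the API) can be exercised on its own. The following is a minimal sketch, assuming the Thrift-generated OptProblem struct from vcell and an Apache Thrift release whose TSerializer/TDeserializer constructors do not themselves throw (newer releases can throw TTransportException there, which is still a TException):

import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TJSONProtocol;

public class OptProblemJsonRoundTrip {
    public static void main(String[] args) throws TException {
        OptProblem original = new OptProblem(); // assumed Thrift-generated struct, as in the snippet above

        // Thrift struct -> JSON text (same pattern as serializer.toString(optProblem) above)
        TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
        String json = serializer.toString(original);

        // JSON text -> Thrift struct (same pattern as the OptRun deserialization above)
        OptProblem copy = new OptProblem();
        TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory());
        deserializer.deserialize(copy, json.getBytes());

        System.out.println(original.equals(copy)); // expected: true
    }
}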
Use of org.apache.thrift.TSerializer in project vcell by virtualcell.
The class OptimizationServerResource, method get_html.
@Override
public Representation get_html() {
    VCellApiApplication application = ((VCellApiApplication) getApplication());
    User vcellUser = application.getVCellUser(getChallengeResponse(), AuthenticationPolicy.ignoreInvalidCredentials);
    OptRun optRun = getOptRun(vcellUser);
    if (optRun == null) {
        throw new RuntimeException("optimization not found");
    }
    try {
        Map<String, Object> dataModel = new HashMap<String, Object>();
        // +"?"+VCellApiApplication.REDIRECTURL_FORMNAME+"="+getRequest().getResourceRef().toUrl());
        dataModel.put("loginurl", "/" + VCellApiApplication.LOGINFORM);
        dataModel.put("logouturl", "/" + VCellApiApplication.LOGOUT + "?" + VCellApiApplication.REDIRECTURL_FORMNAME + "=" + Reference.encode(getRequest().getResourceRef().toUrl().toString()));
        if (vcellUser != null) {
            dataModel.put("userid", vcellUser.getName());
        }
        dataModel.put("optId", getQueryValue(VCellApiApplication.OPTIMIZATIONID));
        TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
        String optRunJson = new String(serializer.serialize(optRun));
        dataModel.put("optimization", new JSONObject(optRunJson));
        dataModel.put("jsonResponse", optRunJson);
        Configuration templateConfiguration = application.getTemplateConfiguration();
        Representation formFtl = new ClientResource(LocalReference.createClapReference("/optimization.ftl")).get();
        TemplateRepresentation templateRepresentation = new TemplateRepresentation(formFtl, templateConfiguration, dataModel, MediaType.TEXT_HTML);
        return templateRepresentation;
    } catch (Exception e) {
        e.printStackTrace();
        throw new ResourceException(Status.SERVER_ERROR_INTERNAL, e.getMessage());
    }
}
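The conversion done here, Thrift struct to JSON text via TSerializer.serialize and then to an org.json.JSONObject for the FreeMarker data model, can be factored into a small helper. A minimal sketch; the class name ThriftJson and the explicit UTF-8 charset are assumptions, not part of the resource above (which uses the platform default charset):

import java.nio.charset.StandardCharsets;

import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TJSONProtocol;
import org.json.JSONObject;

public final class ThriftJson {
    private ThriftJson() {
    }

    // Serialize any Thrift-generated struct with TJSONProtocol and parse the result
    // into an org.json.JSONObject, mirroring what get_html() does for the OptRun.
    public static JSONObject toJsonObject(TBase<?, ?> thriftStruct) throws TException {
        TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
        byte[] jsonBytes = serializer.serialize(thriftStruct);
        String json = new String(jsonBytes, StandardCharsets.UTF_8); // explicit charset instead of the platform default
        return new JSONObject(json);
    }
}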
Use of org.apache.thrift.TSerializer in project hive by apache.
The class DBSerializer, method writeTo.
@Override
public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvider) throws SemanticException, IOException {
    dbObject.putToParameters(ReplicationSpec.KEY.CURR_STATE_ID_SOURCE.toString(), additionalPropertiesProvider.getCurrentReplicationState());
    try {
        TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
        String value = serializer.toString(dbObject);
        writer.jsonGenerator.writeStringField(FIELD_NAME, value);
    } catch (TException e) {
        throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METASTORE.getMsg(), e);
    }
}
Use of org.apache.thrift.TSerializer in project hive by apache.
The class FunctionSerializer, method writeTo.
@Override
public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvider) throws SemanticException, IOException, MetaException {
    List<ResourceUri> resourceUris = new ArrayList<>();
    if (function.getResourceUris() != null) {
        for (ResourceUri uri : function.getResourceUris()) {
            Path inputPath = new Path(uri.getUri());
            if ("hdfs".equals(inputPath.toUri().getScheme())) {
                FileSystem fileSystem = inputPath.getFileSystem(hiveConf);
                Path qualifiedUri = PathBuilder.fullyQualifiedHDFSUri(inputPath, fileSystem);
                String checkSum = ReplChangeManager.checksumFor(qualifiedUri, fileSystem);
                String encodedSrcUri = ReplChangeManager.getInstance(hiveConf).encodeFileUri(qualifiedUri.toString(), checkSum, null);
                if (copyAtLoad) {
                    if (hiveConf.getBoolVar(HiveConf.ConfVars.REPL_HA_DATAPATH_REPLACE_REMOTE_NAMESERVICE)) {
                        encodedSrcUri = Utils.replaceNameserviceInEncodedURI(encodedSrcUri, hiveConf);
                    }
                    resourceUris.add(new ResourceUri(uri.getResourceType(), encodedSrcUri));
                } else {
                    Path newBinaryPath = new Path(functionDataRoot, qualifiedUri.getName());
                    resourceUris.add(new ResourceUri(uri.getResourceType(), newBinaryPath.toString()));
                    functionBinaryCopyPaths.add(new EximUtil.DataCopyPath(additionalPropertiesProvider, new Path(encodedSrcUri), newBinaryPath));
                }
            } else {
                resourceUris.add(uri);
            }
        }
    }
    Function copyObj = new Function(this.function);
    if (!resourceUris.isEmpty()) {
        assert resourceUris.size() == this.function.getResourceUris().size();
        copyObj.setResourceUris(resourceUris);
    }
    try {
        TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
        // This is required; otherwise the correct work object won't be created on repl load.
        writer.jsonGenerator.writeStringField(ReplicationSpec.KEY.REPL_SCOPE.toString(), "all");
        writer.jsonGenerator.writeStringField(ReplicationSpec.KEY.CURR_STATE_ID_SOURCE.toString(), additionalPropertiesProvider.getCurrentReplicationState());
        writer.jsonGenerator.writeStringField(FIELD_NAME, serializer.toString(copyObj));
    } catch (TException e) {
        throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METASTORE.getMsg(), e);
    }
}
Use of org.apache.thrift.TSerializer in project hive by apache.
The class TableSerializer, method writeTo.
@Override
public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvider) throws SemanticException, IOException {
    if (!Utils.shouldReplicate(additionalPropertiesProvider, tableHandle, false, null, null, hiveConf)) {
        return;
    }
    Table tTable = updatePropertiesInTable(tableHandle.getTTable(), additionalPropertiesProvider);
    try {
        TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
        writer.jsonGenerator.writeStringField(FIELD_NAME, serializer.toString(tTable));
        writer.jsonGenerator.writeFieldName(PartitionSerializer.FIELD_NAME);
        writePartitions(writer, additionalPropertiesProvider);
    } catch (TException e) {
        throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METASTORE.getMsg(), e);
    }
}
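The three Hive serializers above (DBSerializer, FunctionSerializer, TableSerializer) share the same core step: render the Thrift-generated metastore object as JSON text with a TJSONProtocol-backed TSerializer, write it as a string field through the JsonWriter, and rethrow any TException as a SemanticException built from ErrorMsg.ERROR_SERIALIZE_METASTORE. A minimal sketch of just the serialization step, as a hypothetical helper; the class and method names are assumptions, not part of the Hive code above:

import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TJSONProtocol;

final class ReplJsonUtil {
    private ReplJsonUtil() {
    }

    // Render a Thrift-generated metastore object (Database, Function, Table, ...)
    // as the JSON text that the writeTo() methods above put into their JsonWriter fields.
    static String toReplJson(TBase<?, ?> metastoreObject) throws TException {
        TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
        return serializer.toString(metastoreObject);
    }
}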