Use of org.bimserver.plugins.serializers.StreamingSerializerPlugin in the project BIMserver by opensourceBIM:
class NewServicesImpl, method listAvailableOutputFormats.
@Override
public List<SFormatSerializerMap> listAvailableOutputFormats(Long poid) throws ServerException, UserException {
Map<String, SFormatSerializerMap> outputs = new HashMap<>();
try (DatabaseSession session = getBimServer().getDatabase().createSession(OperationType.READ_ONLY)) {
Project project = session.get(poid, OldQuery.getDefault());
try {
List<SSerializerPluginConfiguration> allSerializersForPoids = getServiceMap().get(PluginInterface.class).getAllSerializersForPoids(true, Collections.singleton(poid));
for (SSerializerPluginConfiguration pluginConfiguration : allSerializersForPoids) {
PluginDescriptor pluginDescriptor = session.get(pluginConfiguration.getPluginDescriptorId(), OldQuery.getDefault());
Plugin plugin = getBimServer().getPluginManager().getPlugin(pluginDescriptor.getIdentifier(), true);
String outputFormat = null;
// TODO For now only streaming serializers
if (plugin instanceof StreamingSerializerPlugin) {
outputFormat = ((StreamingSerializerPlugin) plugin).getOutputFormat(Schema.valueOf(project.getSchema().toUpperCase()));
}
if (outputFormat != null) {
SFormatSerializerMap map = outputs.get(outputFormat);
if (map == null) {
map = new SFormatSerializerMap();
map.setFormat(outputFormat);
outputs.put(outputFormat, map);
}
map.getSerializers().add(pluginConfiguration);
}
}
} catch (ServerException e) {
e.printStackTrace();
} catch (UserException e) {
e.printStackTrace();
}
return new ArrayList<>(outputs.values());
} catch (BimserverDatabaseException e) {
return handleException(e);
}
}
Use of org.bimserver.plugins.serializers.StreamingSerializerPlugin in the project BIMserver by opensourceBIM:
class ServiceImpl, method download.
@Override
public Long download(Set<Long> roids, String jsonQuery, Long serializerOid, Boolean sync) throws ServerException, UserException {
try {
String username = getAuthorization().getUsername();
DatabaseSession session = getBimServer().getDatabase().createSession(OperationType.READ_ONLY);
Plugin plugin = null;
try {
SerializerPluginConfiguration serializerPluginConfiguration = session.get(StorePackage.eINSTANCE.getSerializerPluginConfiguration(), serializerOid, OldQuery.getDefault());
plugin = getBimServer().getPluginManager().getPlugin(serializerPluginConfiguration.getPluginDescriptor().getPluginClassName(), true);
} catch (BimserverDatabaseException e) {
throw new UserException(e);
} finally {
session.close();
}
if (plugin instanceof StreamingSerializerPlugin || plugin instanceof MessagingStreamingSerializerPlugin) {
LongStreamingDownloadAction longDownloadAction = new LongStreamingDownloadAction(getBimServer(), username, username, getAuthorization(), serializerOid, jsonQuery, roids);
try {
getBimServer().getLongActionManager().start(longDownloadAction);
} catch (Exception e) {
LOGGER.error("", e);
}
if (sync) {
longDownloadAction.waitForCompletion();
}
return longDownloadAction.getProgressTopic().getKey().getId();
} else if (plugin instanceof MessagingSerializerPlugin) {
requireAuthenticationAndRunningServer();
DownloadParameters downloadParameters = new DownloadParameters(getBimServer(), DownloadType.DOWNLOAD_BY_NEW_JSON_QUERY);
downloadParameters.setRoids(roids);
downloadParameters.setJsonQuery(jsonQuery);
downloadParameters.setSerializerOid(serializerOid);
return download(downloadParameters, sync);
} else if (plugin instanceof SerializerPlugin) {
requireAuthenticationAndRunningServer();
DownloadParameters downloadParameters = new DownloadParameters(getBimServer(), DownloadType.DOWNLOAD_BY_NEW_JSON_QUERY);
downloadParameters.setRoids(roids);
downloadParameters.setJsonQuery(jsonQuery);
downloadParameters.setSerializerOid(serializerOid);
return download(downloadParameters, sync);
} else {
throw new UserException("Unimplemented type of plugin: " + plugin);
}
} catch (Exception e) {
return handleException(e);
}
}
Use of org.bimserver.plugins.serializers.StreamingSerializerPlugin in the project BIMserver by opensourceBIM:
class StreamingGeometryGenerator, method generateGeometry.
@SuppressWarnings("unchecked")
public GenerateGeometryResult generateGeometry(long uoid, final DatabaseSession databaseSession, QueryContext queryContext, long nrObjects) throws BimserverDatabaseException, GeometryGeneratingException {
GenerateGeometryResult generateGeometryResult = new GenerateGeometryResult();
packageMetaData = queryContext.getPackageMetaData();
productClass = packageMetaData.getEClass("IfcProduct");
geometryFeature = (EReference) productClass.getEStructuralFeature("geometry");
representationFeature = productClass.getEStructuralFeature("Representation");
representationsFeature = packageMetaData.getEClass("IfcProductDefinitionShape").getEStructuralFeature("Representations");
itemsFeature = packageMetaData.getEClass("IfcShapeRepresentation").getEStructuralFeature("Items");
mappingSourceFeature = packageMetaData.getEClass("IfcMappedItem").getEStructuralFeature("MappingSource");
GregorianCalendar now = new GregorianCalendar();
DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
debugIdentifier = dateFormat.format(now.getTime()) + " (" + report.getOriginalIfcFileName() + ")";
long start = System.nanoTime();
String pluginName = "";
if (queryContext.getPackageMetaData().getSchema() == Schema.IFC4) {
pluginName = "org.bimserver.ifc.step.serializer.Ifc4StepStreamingSerializerPlugin";
} else if (queryContext.getPackageMetaData().getSchema() == Schema.IFC2X3TC1) {
pluginName = "org.bimserver.ifc.step.serializer.Ifc2x3tc1StepStreamingSerializerPlugin";
} else {
throw new GeometryGeneratingException("Unknown schema " + queryContext.getPackageMetaData().getSchema());
}
reuseGeometry = bimServer.getServerSettingsCache().getServerSettings().isReuseGeometry();
optimizeMappedItems = bimServer.getServerSettingsCache().getServerSettings().isOptimizeMappedItems();
report.setStart(new GregorianCalendar());
report.setIfcSchema(queryContext.getPackageMetaData().getSchema());
report.setUseMappingOptimization(optimizeMappedItems);
report.setReuseGeometry(reuseGeometry);
try {
final StreamingSerializerPlugin ifcSerializerPlugin = (StreamingSerializerPlugin) bimServer.getPluginManager().getPlugin(pluginName, true);
if (ifcSerializerPlugin == null) {
throw new UserException("No IFC serializer found");
}
User user = (User) databaseSession.get(uoid, org.bimserver.database.OldQuery.getDefault());
UserSettings userSettings = user.getUserSettings();
report.setUserName(user.getName());
report.setUserUserName(user.getUsername());
RenderEnginePluginConfiguration renderEngine = null;
if (eoid != -1) {
renderEngine = databaseSession.get(eoid, OldQuery.getDefault());
} else {
renderEngine = userSettings.getDefaultRenderEngine();
}
if (renderEngine == null) {
throw new UserException("No default render engine has been selected for this user");
}
renderEngineName = renderEngine.getName();
int availableProcessors = Runtime.getRuntime().availableProcessors();
report.setAvailableProcessors(availableProcessors);
int maxSimultanousThreads = Math.min(bimServer.getServerSettingsCache().getServerSettings().getRenderEngineProcesses(), availableProcessors);
if (maxSimultanousThreads < 1) {
maxSimultanousThreads = 1;
}
final RenderEngineSettings settings = new RenderEngineSettings();
settings.setPrecision(Precision.SINGLE);
settings.setIndexFormat(IndexFormat.AUTO_DETECT);
settings.setGenerateNormals(true);
settings.setGenerateTriangles(true);
settings.setGenerateWireFrame(false);
final RenderEngineFilter renderEngineFilter = new RenderEngineFilter();
RenderEnginePool renderEnginePool = bimServer.getRenderEnginePools().getRenderEnginePool(packageMetaData.getSchema(), renderEngine.getPluginDescriptor().getPluginClassName(), bimServer.getPluginSettingsCache().getPluginSettings(renderEngine.getOid()));
report.setRenderEngineName(renderEngine.getName());
report.setRenderEnginePluginVersion(renderEngine.getPluginDescriptor().getPluginBundleVersion().getVersion());
VersionInfo versionInfo = renderEnginePool.getRenderEngineFactory().getVersionInfo();
report.setRenderEngineVersion(versionInfo);
// TODO there must be a cleaner way of getting this info, since it's in the database...
RenderEngine engine = renderEnginePool.borrowObject();
try {
applyLayerSets = engine.isApplyLayerSets();
report.setApplyLayersets(applyLayerSets);
calculateQuantities = engine.isCalculateQuantities();
report.setCalculateQuantities(calculateQuantities);
} finally {
renderEnginePool.returnObject(engine);
}
// TODO reuse, pool the pools :) Or something smarter
// TODO reuse queue, or try to determine a realistic size, or don't use a fixed-size queue
ThreadPoolExecutor executor = new ThreadPoolExecutor(maxSimultanousThreads, maxSimultanousThreads, 24, TimeUnit.HOURS, new ArrayBlockingQueue<Runnable>(10000000));
JsonQueryObjectModelConverter jsonQueryObjectModelConverter = new JsonQueryObjectModelConverter(packageMetaData);
String queryNameSpace = packageMetaData.getSchema().name().toLowerCase() + "-stdlib";
// All references should already be direct, since this is now done in BimServer on startup, quite the hack...
Include objectPlacement = jsonQueryObjectModelConverter.getDefineFromFile(queryNameSpace + ":ObjectPlacement", true);
Set<EClass> classes = null;
if (queryContext.getOidCounters() != null) {
classes = queryContext.getOidCounters().keySet();
} else {
classes = packageMetaData.getEClasses();
}
float multiplierToMm = processUnits(databaseSession, queryContext);
generateGeometryResult.setMultiplierToMm(multiplierToMm);
// Phase 1 (mapped item detection) sometimes detects that mapped items have invalid (unsupported) RepresentationIdentifier values, this set keeps track of objects to skip in Phase 2 because of that
Set<Long> toSkip = new HashSet<>();
// Less than 100 objects -> Use 1 object per process (so we have progress indication per 1%)
// More than 100 objects -> Use # objects / 100 objects per process
// Unless the amount of objects becomes > 100 / process, than cap it on 100
int regularObjectCount = 0;
Set<EClass> typesToDo = new HashSet<>();
Set<Long> done = new HashSet<>();
for (EClass eClass : classes) {
if (packageMetaData.getEClass("IfcProduct").isSuperTypeOf(eClass)) {
Query query2 = new Query(eClass.getName() + "Main query", packageMetaData);
QueryPart queryPart2 = query2.createQueryPart();
queryPart2.addType(eClass, false);
Include representationInclude = queryPart2.createInclude();
representationInclude.addType(eClass, false);
representationInclude.addFieldDirect("Representation");
Include representationsInclude = representationInclude.createInclude();
representationsInclude.addType(packageMetaData.getEClass("IfcProductRepresentation"), true);
representationsInclude.addFieldDirect("Representations");
Include itemsInclude = representationsInclude.createInclude();
itemsInclude.addType(packageMetaData.getEClass("IfcShapeRepresentation"), false);
itemsInclude.addFieldDirect("Items");
itemsInclude.addFieldDirect("ContextOfItems");
Include mappingSourceInclude = itemsInclude.createInclude();
mappingSourceInclude.addType(packageMetaData.getEClass("IfcMappedItem"), false);
mappingSourceInclude.addFieldDirect("MappingSource");
mappingSourceInclude.addFieldDirect("MappingTarget");
Include representationMap = mappingSourceInclude.createInclude();
representationMap.addType(packageMetaData.getEClass("IfcRepresentationMap"), false);
representationMap.addFieldDirect("MappedRepresentation");
Include createInclude = representationMap.createInclude();
createInclude.addType(packageMetaData.getEClass("IfcShapeRepresentation"), true);
Include targetInclude = mappingSourceInclude.createInclude();
targetInclude.addType(packageMetaData.getEClass("IfcCartesianTransformationOperator3D"), false);
targetInclude.addFieldDirect("Axis1");
targetInclude.addFieldDirect("Axis2");
targetInclude.addFieldDirect("Axis3");
targetInclude.addFieldDirect("LocalOrigin");
queryPart2.addInclude(objectPlacement);
Map<Long, Map<Long, ProductDef>> representationMapToProduct = new HashMap<>();
QueryObjectProvider queryObjectProvider2 = new QueryObjectProvider(databaseSession, bimServer, query2, Collections.singleton(queryContext.getRoid()), packageMetaData);
HashMapVirtualObject next = queryObjectProvider2.next();
while (next != null) {
if (next.eClass() == eClass) {
AbstractHashMapVirtualObject representation = next.getDirectFeature(representationFeature);
if (representation != null) {
Set<HashMapVirtualObject> representations = representation.getDirectListFeature(representationsFeature);
if (representations != null) {
boolean foundValidContext = false;
for (HashMapVirtualObject representationItem : representations) {
if (usableContext(representationItem)) {
foundValidContext = true;
break;
}
}
boolean stop = false;
Set<String> counts = new HashSet<>();
for (HashMapVirtualObject representationItem : representations) {
String representationIdentifier = (String) representationItem.get("RepresentationIdentifier");
if (counts.contains(representationIdentifier)) {
stop = true;
} else {
counts.add(representationIdentifier);
}
}
if (stop) {
next = queryObjectProvider2.next();
continue;
}
for (HashMapVirtualObject representationItem : representations) {
if (!usableContext(representationItem) && foundValidContext) {
continue;
}
if (hasValidRepresentationIdentifier(representationItem)) {
Set<HashMapVirtualObject> items = representationItem.getDirectListFeature(itemsFeature);
if (items == null || items.size() > 1) {
// Only if there is just one item, we'll store this for reuse
// TODO actually we could store them for > 1 as well, only they should only be used (2nd stage) for products that use the exact same items, for now
regularObjectCount++;
continue;
}
// So this next loop always results in 1 (or no) loops
for (HashMapVirtualObject item : items) {
report.addRepresentationItem(item.eClass().getName());
if (!packageMetaData.getEClass("IfcMappedItem").isSuperTypeOf(item.eClass())) {
regularObjectCount++;
// All non IfcMappedItem objects will be done in phase 2
continue;
}
AbstractHashMapVirtualObject mappingTarget = item.getDirectFeature(packageMetaData.getEReference("IfcMappedItem", "MappingTarget"));
AbstractHashMapVirtualObject mappingSourceOfMappedItem = item.getDirectFeature(packageMetaData.getEReference("IfcMappedItem", "MappingSource"));
if (mappingSourceOfMappedItem == null) {
LOGGER.info("No mapping source");
continue;
}
AbstractHashMapVirtualObject mappedRepresentation = mappingSourceOfMappedItem.getDirectFeature(packageMetaData.getEReference("IfcRepresentationMap", "MappedRepresentation"));
if (!hasValidRepresentationIdentifier(mappedRepresentation)) {
// Skip this mapping, we should store somewhere that this object should also be skipped in the normal way
String identifier = (String) mappedRepresentation.get("RepresentationIdentifier");
report.addSkippedBecauseOfInvalidRepresentationIdentifier(identifier);
toSkip.add(next.getOid());
continue;
}
double[] mappingMatrix = Matrix.identity();
double[] productMatrix = Matrix.identity();
if (mappingTarget != null) {
AbstractHashMapVirtualObject axis1 = mappingTarget.getDirectFeature(packageMetaData.getEReference("IfcCartesianTransformationOperator", "Axis1"));
AbstractHashMapVirtualObject axis2 = mappingTarget.getDirectFeature(packageMetaData.getEReference("IfcCartesianTransformationOperator", "Axis2"));
AbstractHashMapVirtualObject axis3 = mappingTarget.getDirectFeature(packageMetaData.getEReference("IfcCartesianTransformationOperator3D", "Axis3"));
AbstractHashMapVirtualObject localOrigin = mappingTarget.getDirectFeature(packageMetaData.getEReference("IfcCartesianTransformationOperator", "LocalOrigin"));
double[] a1 = null;
double[] a2 = null;
double[] a3 = null;
if (axis3 != null) {
List<Double> list = (List<Double>) axis3.get("DirectionRatios");
a3 = new double[] { list.get(0), list.get(1), list.get(2) };
Vector.normalize(a3);
} else {
a3 = new double[] { 0, 0, 1 };
}
if (axis1 != null) {
List<Double> list = (List<Double>) axis1.get("DirectionRatios");
a1 = new double[] { list.get(0), list.get(1), list.get(2) };
Vector.normalize(a1);
} else {
if (a3[0] == 1 && a3[1] == 0 && a3[2] == 0) {
a1 = new double[] { 0, 1, 0 };
} else {
a1 = new double[] { 1, 0, 0 };
}
}
double[] xVec = Vector.scalarProduct(Vector.dot(a1, a3), a3);
double[] xAxis = Vector.subtract(a1, xVec);
Vector.normalize(xAxis);
if (axis2 != null) {
List<Double> list = (List<Double>) axis2.get("DirectionRatios");
a2 = new double[] { list.get(0), list.get(1), list.get(2) };
Vector.normalize(a2);
} else {
a2 = new double[] { 0, 1, 0 };
}
double[] tmp = Vector.scalarProduct(Vector.dot(a2, a3), a3);
double[] yAxis = Vector.subtract(a2, tmp);
tmp = Vector.scalarProduct(Vector.dot(a2, xAxis), xAxis);
yAxis = Vector.subtract(yAxis, tmp);
Vector.normalize(yAxis);
a2 = yAxis;
a1 = xAxis;
List<Double> t = (List<Double>) localOrigin.get("Coordinates");
mappingMatrix = new double[] { a1[0], a1[1], a1[2], 0, a2[0], a2[1], a2[2], 0, a3[0], a3[1], a3[2], 0, t.get(0).doubleValue(), t.get(1).doubleValue(), t.get(2).doubleValue(), 1 };
}
AbstractHashMapVirtualObject placement = next.getDirectFeature(packageMetaData.getEReference("IfcProduct", "ObjectPlacement"));
if (placement != null) {
productMatrix = placementToMatrix(placement);
}
AbstractHashMapVirtualObject mappingSource = item.getDirectFeature(mappingSourceFeature);
if (mappingSource != null) {
Map<Long, ProductDef> map = representationMapToProduct.get(((HashMapVirtualObject) mappingSource).getOid());
if (map == null) {
map = new LinkedHashMap<>();
representationMapToProduct.put(((HashMapVirtualObject) mappingSource).getOid(), map);
}
ProductDef pd = new ProductDef(next.getOid());
pd.setMappedItemOid(item.getOid());
pd.setObject(next);
pd.setProductMatrix(productMatrix);
pd.setMappingMatrix(mappingMatrix);
pd.setRepresentationOid(representationItem.getOid());
map.put(next.getOid(), pd);
}
}
} else {
report.addSkippedBecauseOfInvalidRepresentationIdentifier((String) representationItem.get("RepresentationIdentifier"));
}
}
}
}
}
next = queryObjectProvider2.next();
}
for (Long repMapId : representationMapToProduct.keySet()) {
Map<Long, ProductDef> map = representationMapToProduct.get(repMapId);
// When there is more than one instance using this mapping
if (map.size() > 1) {
Query query = new Query("Reuse query " + eClass.getName(), packageMetaData);
query.setDoubleBuffer(true);
QueryPart queryPart = query.createQueryPart();
// QueryPart queryPart3 = query.createQueryPart();
queryPart.addType(eClass, false);
// queryPart3.addType(packageMetaData.getEClass("IfcMappedItem"), false);
long masterOid = map.values().iterator().next().getOid();
double[] inverted = Matrix.identity();
ProductDef masterProductDef = map.get(masterOid);
if (!Matrix.invertM(inverted, 0, masterProductDef.getMappingMatrix(), 0)) {
LOGGER.debug("No inverse, this mapping will be skipped and processed as normal");
// TODO we should however be able to squeeze out a little more reuse by finding another master...
continue;
}
Set<Long> representationOids = new HashSet<>();
for (ProductDef pd : map.values()) {
done.add(pd.getOid());
if (!optimizeMappedItems) {
queryPart.addOid(pd.getOid());
representationOids.add(pd.getRepresentationOid());
// In theory these should be fused together during querying
// queryPart3.addOid(pd.getMappedItemOid());
} else {
pd.setMasterOid(masterOid);
}
}
if (optimizeMappedItems) {
queryPart.addOid(masterOid);
representationOids.add(masterProductDef.getRepresentationOid());
}
LOGGER.debug("Running " + map.size() + " objects in one batch because of reused geometry " + (eClass.getName()));
// queryPart3.addInclude(jsonQueryObjectModelConverter.getDefineFromFile("ifc2x3tc1-stdlib:IfcMappedItem"));
processQuery(databaseSession, queryContext, generateGeometryResult, ifcSerializerPlugin, settings, renderEngineFilter, renderEnginePool, executor, eClass, query, queryPart, true, map, map.size(), representationOids);
}
}
typesToDo.add(eClass);
}
}
// LOGGER.info("Regular object count: " + regularObjectCount);
int maxObjectsPerFile = regularObjectCount / 100;
if (regularObjectCount < 100) {
maxObjectsPerFile = 1;
}
if (maxObjectsPerFile > 100) {
maxObjectsPerFile = 100;
}
// maxObjectsPerFile = 1;
report.setMaxPerFile(maxObjectsPerFile);
for (EClass eClass : typesToDo) {
Query query3 = new Query("Remaining " + eClass.getName(), packageMetaData);
QueryPart queryPart3 = query3.createQueryPart();
queryPart3.addType(eClass, false);
Include include3 = queryPart3.createInclude();
include3.addType(eClass, false);
include3.addFieldDirect("Representation");
Include rInclude = include3.createInclude();
rInclude.addType(packageMetaData.getEClass("IfcProductRepresentation"), true);
rInclude.addFieldDirect("Representations");
Include representationsInclude2 = rInclude.createInclude();
representationsInclude2.addType(packageMetaData.getEClass("IfcShapeModel"), true);
representationsInclude2.addFieldDirect("ContextOfItems");
Query query = new Query("Main " + eClass.getName(), packageMetaData);
query.setDoubleBuffer(true);
QueryPart queryPart = query.createQueryPart();
int written = 0;
QueryObjectProvider queryObjectProvider2 = new QueryObjectProvider(databaseSession, bimServer, query3, Collections.singleton(queryContext.getRoid()), packageMetaData);
HashMapVirtualObject next = queryObjectProvider2.next();
Set<Long> representationOids = new HashSet<>();
while (next != null) {
// Not sure why the duplicate code in the next 20 lines
if (next.eClass() == eClass && !done.contains(next.getOid()) && !toSkip.contains(next.getOid())) {
AbstractHashMapVirtualObject representation = next.getDirectFeature(representationFeature);
if (representation != null) {
Set<HashMapVirtualObject> list = representation.getDirectListFeature(packageMetaData.getEReference("IfcProductRepresentation", "Representations"));
Set<Long> goForIt = goForIt(list);
if (!goForIt.isEmpty()) {
if (next.eClass() == eClass && !done.contains(next.getOid())) {
representation = next.getDirectFeature(representationFeature);
if (representation != null) {
list = representation.getDirectListFeature(packageMetaData.getEReference("IfcProductRepresentation", "Representations"));
Set<Long> goForIt2 = goForIt(list);
if (!goForIt2.isEmpty()) {
queryPart.addOid(next.getOid());
representationOids.addAll(goForIt2);
written++;
if (written >= maxObjectsPerFile) {
processQuery(databaseSession, queryContext, generateGeometryResult, ifcSerializerPlugin, settings, renderEngineFilter, renderEnginePool, executor, eClass, query, queryPart, false, null, written, representationOids);
query = new Query("Main " + eClass.getName(), packageMetaData);
query.setDoubleBuffer(true);
queryPart = query.createQueryPart();
written = 0;
representationOids.clear();
}
}
}
}
}
}
}
next = queryObjectProvider2.next();
}
if (written > 0) {
processQuery(databaseSession, queryContext, generateGeometryResult, ifcSerializerPlugin, settings, renderEngineFilter, renderEnginePool, executor, eClass, query, queryPart, false, null, written, representationOids);
}
}
allJobsPushed = true;
executor.shutdown();
executor.awaitTermination(24, TimeUnit.HOURS);
// TODO, disable?
if (true) {
LOGGER.debug("Generating quantized vertices");
double[] quantizationMatrix = createQuantizationMatrixFromBounds(generateGeometryResult.getBoundsUntransformed(), multiplierToMm);
for (Long id : geometryDataMap.keySet()) {
Tuple<HashMapVirtualObject, ByteBuffer> tuple = geometryDataMap.get(id);
HashMapVirtualObject buffer = new HashMapVirtualObject(queryContext, GeometryPackage.eINSTANCE.getBuffer());
// Buffer buffer = databaseSession.create(Buffer.class);
buffer.set("data", quantizeVertices(tuple.getB().asDoubleBuffer(), quantizationMatrix, multiplierToMm).array());
// buffer.setData(quantizeVertices(tuple.getB(), quantizationMatrix, multiplierToMm).array());
// databaseSession.store(buffer);
buffer.save();
HashMapVirtualObject geometryData = tuple.getA();
geometryData.set("verticesQuantized", buffer.getOid());
int reused = (int) geometryData.eGet(GeometryPackage.eINSTANCE.getGeometryData_Reused());
int nrTriangles = (int) geometryData.eGet(GeometryPackage.eINSTANCE.getGeometryData_NrIndices()) / 3;
int saveableTriangles = Math.max(0, (reused - 1)) * nrTriangles;
geometryData.set("saveableTriangles", saveableTriangles);
// if (saveableTriangles > 0) {
// System.out.println("Saveable triangles: " + saveableTriangles);
// }
geometryData.saveOverwrite();
}
}
long end = System.nanoTime();
long total = totalBytes.get() - (bytesSavedByHash.get() + bytesSavedByTransformation.get() + bytesSavedByMapping.get());
LOGGER.info("Rendertime: " + Formatters.nanosToString(end - start) + ", " + "Reused (by hash): " + Formatters.bytesToString(bytesSavedByHash.get()) + ", Reused (by transformation): " + Formatters.bytesToString(bytesSavedByTransformation.get()) + ", Reused (by mapping): " + Formatters.bytesToString(bytesSavedByMapping.get()) + ", Total: " + Formatters.bytesToString(totalBytes.get()) + ", Final: " + Formatters.bytesToString(total));
if (report.getNumberOfDebugFiles() > 0) {
LOGGER.error("[" + report.getOriginalIfcFileName() + "] Number of erroneous files: " + report.getNumberOfDebugFiles());
}
SkippedBecauseOfInvalidRepresentation skipped = report.getSkippedBecauseOfInvalidRepresentationIdentifier();
if (skipped.hasImportant()) {
LOGGER.warn("[" + report.getOriginalIfcFileName() + "] Number of representations skipped:");
for (String identifier : skipped.getImportantSet()) {
LOGGER.warn("\t" + identifier + ": " + skipped.get(identifier));
}
}
String dump = geometryGenerationDebugger.dump();
if (dump != null) {
LOGGER.info(dump);
}
} catch (Exception e) {
running = false;
LOGGER.error("", e);
report.setEnd(new GregorianCalendar());
throw new GeometryGeneratingException(e);
}
report.setEnd(new GregorianCalendar());
try {
if (report.getNumberOfDebugFiles() > 0) {
writeDebugFile();
}
} catch (IOException e) {
LOGGER.debug("", e);
}
return generateGeometryResult;
}
Use of org.bimserver.plugins.serializers.StreamingSerializerPlugin in the project BIMserver by opensourceBIM:
class PluginServiceImpl, method getAllSerializersForPoids.
@Override
public List<SSerializerPluginConfiguration> getAllSerializersForPoids(Boolean onlyEnabled, Set<Long> poids) throws ServerException, UserException {
requireRealUserAuthentication();
DatabaseSession session = getBimServer().getDatabase().createSession(OperationType.READ_ONLY);
try {
Set<Schema> uniqueSchemas = new HashSet<>();
for (Long poid : poids) {
Project project = session.get(poid, OldQuery.getDefault());
uniqueSchemas.add(Schema.valueOf(project.getSchema().toUpperCase()));
}
Set<Schema> schemaOr = new HashSet<>();
if (uniqueSchemas.size() == 0) {
// Weird, no schemas
} else if (uniqueSchemas.size() == 1) {
// Easy, just add it, and see if there are converter targets and
// add those too
Schema schema = uniqueSchemas.iterator().next();
schemaOr.add(schema);
// TODO make recursive
for (Schema target : getBimServer().getSchemaConverterManager().getSchemaTargets(schema)) {
schemaOr.add(target);
}
} else if (uniqueSchemas.size() == 2) {
// This is harder, if we have 2 schema, we must figure out a way
// to convert to 1 schema, and then filter the allowed source
// schemas
Iterator<Schema> iterator = uniqueSchemas.iterator();
Schema schema1 = iterator.next();
Schema schema2 = iterator.next();
SchemaConverterFactory converter1 = getBimServer().getSchemaConverterManager().getSchemaConverterFactory(schema1, schema2);
SchemaConverterFactory converter2 = getBimServer().getSchemaConverterManager().getSchemaConverterFactory(schema2, schema1);
if (converter1 != null) {
schemaOr.add(schema1);
}
if (converter2 != null) {
schemaOr.add(schema2);
}
} else {
throw new ServerException("Unimplemented, no support for > 2 schemas");
}
UserSettings userSettings = getUserSettings(session);
List<SSerializerPluginConfiguration> sSerializers = new ArrayList<SSerializerPluginConfiguration>();
for (SerializerPluginConfiguration serializerPluginConfiguration : userSettings.getSerializers()) {
Plugin plugin = getBimServer().getPluginManager().getPlugin(serializerPluginConfiguration.getPluginDescriptor().getPluginClassName(), true);
if (plugin instanceof SerializerPlugin) {
SerializerPlugin serializerPlugin = getBimServer().getPluginManager().getSerializerPlugin(serializerPluginConfiguration.getPluginDescriptor().getPluginClassName(), true);
for (Schema schema : serializerPlugin.getSupportedSchemas()) {
if (schemaOr.contains(schema)) {
if (!onlyEnabled || (serializerPluginConfiguration.getEnabled() && serializerPluginConfiguration.getPluginDescriptor().getEnabled())) {
sSerializers.add(getBimServer().getSConverter().convertToSObject(serializerPluginConfiguration));
break;
}
}
}
} else if (plugin instanceof StreamingSerializerPlugin) {
StreamingSerializerPlugin streamingSerializerPlugin = (StreamingSerializerPlugin) plugin;
for (Schema schema : streamingSerializerPlugin.getSupportedSchemas()) {
if (schemaOr.contains(schema)) {
if (!onlyEnabled || (serializerPluginConfiguration.getEnabled() && serializerPluginConfiguration.getPluginDescriptor().getEnabled())) {
sSerializers.add(getBimServer().getSConverter().convertToSObject(serializerPluginConfiguration));
break;
}
}
}
}
}
Collections.sort(sSerializers, new SPluginConfigurationComparator());
return sSerializers;
} catch (Exception e) {
handleException(e);
} finally {
session.close();
}
return null;
}
Aggregations