use of org.alfresco.service.cmr.repository.TransformationOptionLimits in project alfresco-repository by Alfresco.
the class TransformerConfigLimitsTest method useUnsetTest.
@Test
// Checks the limit does not change when a use is specified but no use-specific value is set
public void useUnsetTest() {
mockProperties(transformerProperties, "content.transformer.transformer1.pageLimit", "76");
extractor = new TransformerConfigLimits(transformerProperties, mimetypeService);
TransformationOptionLimits transformerDefaultLimits = extractor.getLimits(transformer1, null, null, null);
TransformationOptionLimits transformerDoclibLimits = extractor.getLimits(transformer1, null, null, "doclib");
assertEquals(76, transformerDefaultLimits.getPageLimit());
assertEquals(76, transformerDoclibLimits.getPageLimit());
}
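For contrast, here is a minimal companion sketch of the opposite case, built on the same fixture and helpers as the test above (mockProperties, transformerProperties, mimetypeService, transformer1). The per-use property suffix (".use.doclib") and the multi-pair mockProperties call are assumptions about the configuration naming and the test helper, not confirmed by the excerpt above.
@Test
// Sketch only: a limit configured specifically for a use should override the transformer default.
// The ".use.doclib" property suffix and the varargs mockProperties call are assumed here.
public void useSetSketch() {
    mockProperties(transformerProperties,
            "content.transformer.transformer1.pageLimit", "76",
            "content.transformer.transformer1.pageLimit.use.doclib", "1");
    extractor = new TransformerConfigLimits(transformerProperties, mimetypeService);
    TransformationOptionLimits transformerDefaultLimits = extractor.getLimits(transformer1, null, null, null);
    TransformationOptionLimits transformerDoclibLimits = extractor.getLimits(transformer1, null, null, "doclib");
    assertEquals(76, transformerDefaultLimits.getPageLimit());
    assertEquals(1, transformerDoclibLimits.getPageLimit());
}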
use of org.alfresco.service.cmr.repository.TransformationOptionLimits in project alfresco-repository by Alfresco.
the class AbstractContentTransformer2 method transform.
/**
* @see org.alfresco.repo.content.transform.ContentTransformer#transform(org.alfresco.service.cmr.repository.ContentReader, org.alfresco.service.cmr.repository.ContentWriter, org.alfresco.service.cmr.repository.TransformationOptions)
*/
public void transform(ContentReader reader, ContentWriter writer, TransformationOptions options) throws ContentIOException {
try {
depth.set(depth.get() + 1);
// begin timing
long before = System.currentTimeMillis();
String sourceMimetype = reader.getMimetype();
String targetMimetype = writer.getMimetype();
// check options map
if (options == null) {
options = new TransformationOptions();
}
try {
if (transformerDebug.isEnabled()) {
((LegacyTransformerDebug) transformerDebug).pushTransform(this, reader.getContentUrl(), sourceMimetype, targetMimetype, reader.getSize(), options);
}
// MNT-16381: check the mimetype of the file supplied by the user
// matches the sourceMimetype of the reader. Intermediate files are
// not checked.
strictMimetypeCheck(reader, options, sourceMimetype);
// Check the transformability
checkTransformable(reader, writer, options);
// Pass on any limits to the reader
setReaderLimits(reader, writer, options);
// Transform
// MNT-12238: CLONE - CLONE - Upload of PPTX causes very high memory usage leading to system instability
// Limiting transformation up to configured amount of milliseconds to avoid very high RAM consumption
// and OOM during transforming problematic documents
TransformationOptionLimits limits = getLimits(reader.getMimetype(), writer.getMimetype(), options);
// Guard against a null limits object before asking for the timeout
long timeoutMs = (null == limits) ? -1 : limits.getTimeoutMs();
if (!useTimeoutThread || (-1 == timeoutMs)) {
transformInternal(reader, writer, options);
} else {
Future<?> submittedTask = null;
StreamAwareContentReaderProxy proxiedReader = new StreamAwareContentReaderProxy(reader);
StreamAwareContentWriterProxy proxiedWriter = new StreamAwareContentWriterProxy(writer);
try {
submittedTask = getExecutorService().submit(new TransformInternalCallable(proxiedReader, proxiedWriter, options));
submittedTask.get(timeoutMs + additionalThreadTimout, TimeUnit.MILLISECONDS);
} catch (TimeoutException e) {
releaseResources(submittedTask, proxiedReader, proxiedWriter);
throw new TimeoutException("Transformation failed due to timeout limit");
} catch (InterruptedException e) {
releaseResources(submittedTask, proxiedReader, proxiedWriter);
throw new InterruptedException("Transformation failed, because the thread of the transformation was interrupted");
} catch (ExecutionException e) {
Throwable cause = e.getCause();
if (cause instanceof TransformInternalCallableException) {
cause = ((TransformInternalCallableException) cause).getCause();
}
throw cause;
}
}
// record time
long after = System.currentTimeMillis();
recordTime(sourceMimetype, targetMimetype, after - before);
} catch (ContentServiceTransientException cste) {
// A transient failure: do not record an error or update the transformer's average time.
if (logger.isDebugEnabled()) {
logger.debug("Transformation has been transiently declined: \n" + " reader: " + reader + "\n" + " writer: " + writer + "\n" + " options: " + options + "\n" + " transformer: " + this);
}
// We rethrow the exception
throw cste;
} catch (UnsupportedTransformationException e) {
// Don't record an error or even the time, as this is normal in compound transformations.
transformerDebug.debug(" Failed", e);
throw e;
} catch (Throwable e) {
// Record this failure in terms of time taken, so that when transformers compete
// for the same transformation, those that tend to fail are prejudiced against.
long after = System.currentTimeMillis();
recordError(sourceMimetype, targetMimetype, after - before);
// Ask Tika to detect the document and report back whether
// the current mime type is plausible
String differentType = getMimetypeService().getMimetypeIfNotMatches(reader.getReader());
// Report the error
if (differentType == null) {
transformerDebug.debug(" Failed", e);
throw new ContentIOException("Content conversion failed: \n" + " reader: " + reader + "\n" + " writer: " + writer + "\n" + " options: " + options.toString(false) + "\n" + " limits: " + getLimits(reader, writer, options), e);
} else {
transformerDebug.debug(" Failed: Mime type was '" + differentType + "'", e);
if (retryTransformOnDifferentMimeType) {
// MNT-11015 fix.
// Set a new reader to refresh the input stream.
reader = reader.getReader();
// Set the MIME type detected by Tika on the content reader
reader.setMimetype(differentType);
// Get the correct transformer for the detected MIME type and try to transform
// the file with that transformer
ContentTransformer transformer = this.registry.getTransformer(differentType, reader.getSize(), targetMimetype, options);
if (null != transformer) {
transformer.transform(reader, writer, options);
} else {
transformerDebug.debug(" Failed", e);
throw new ContentIOException("Content conversion failed: \n" + " reader: " + reader + "\n" + " writer: " + writer + "\n" + " options: " + options.toString(false) + "\n" + " limits: " + getLimits(reader, writer, options) + "\n" + " claimed mime type: " + reader.getMimetype() + "\n" + " detected mime type: " + differentType + "\n" + " transformer not found" + "\n", e);
}
} else {
throw new ContentIOException("Content conversion failed: \n" + " reader: " + reader + "\n" + " writer: " + writer + "\n" + " options: " + options.toString(false) + "\n" + " limits: " + getLimits(reader, writer, options) + "\n" + " claimed mime type: " + reader.getMimetype() + "\n" + " detected mime type: " + differentType, e);
}
}
} finally {
transformerDebug.popTransform();
// check that the reader and writer are both closed
if (reader.isChannelOpen()) {
logger.error("Content reader not closed by transformer: \n" + " reader: " + reader + "\n" + " transformer: " + this);
}
if (writer.isChannelOpen()) {
logger.error("Content writer not closed by transformer: \n" + " writer: " + writer + "\n" + " transformer: " + this);
}
}
// done
if (logger.isDebugEnabled()) {
logger.debug("Completed transformation: \n" + " reader: " + reader + "\n" + " writer: " + writer + "\n" + " options: " + options + "\n" + " transformer: " + this);
}
} finally {
depth.set(depth.get() - 1);
}
}
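The timeout branch above boils down to a standard bounded-wait pattern: submit the work to an ExecutorService and wait on the Future with a deadline. The following is a minimal, self-contained sketch of that pattern with generic names; it is not the Alfresco classes (the real code additionally wraps the reader and writer in stream-aware proxies so their streams can be closed when the timeout fires).
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class BoundedTransformSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        long timeoutMs = 1000;
        Callable<Void> work = () -> {
            Thread.sleep(5000); // stands in for a long-running transformInternal(...)
            return null;
        };
        Future<Void> task = executor.submit(work);
        try {
            task.get(timeoutMs, TimeUnit.MILLISECONDS);
        } catch (TimeoutException e) {
            task.cancel(true); // interrupt the worker; the real code also releases the proxied streams
            System.out.println("Transformation abandoned after " + timeoutMs + " ms");
        } catch (ExecutionException e) {
            throw new RuntimeException("Transformation failed", e.getCause());
        } finally {
            executor.shutdownNow();
        }
    }
}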
use of org.alfresco.service.cmr.repository.TransformationOptionLimits in project alfresco-repository by Alfresco.
the class AbstractContentTransformerLimits method getMaxSourceSizeKBytes.
/**
* Returns the maximum source size (in KBytes) allowed given the supplied values.
* @return 0 if the transformation is disabled, -1 if there is no limit, otherwise the size in KBytes.
*/
@Override
public long getMaxSourceSizeKBytes(String sourceMimetype, String targetMimetype, TransformationOptions options) {
long maxSourceSizeKBytes = -1;
// The maxSourceSizeKBytes value is ignored if this transformer is able to use
// page limits and the limits include a pageLimit. Normally used in the creation
// of icons. Note the readLimitKBytes value is not checked as the combined limits
// only have the max or limit KBytes value set (the smaller value is returned).
TransformationOptionLimits limits = getLimits(sourceMimetype, targetMimetype, options);
if (!isPageLimitSupported(sourceMimetype, targetMimetype, options) || limits.getPageLimit() <= 0) {
maxSourceSizeKBytes = limits.getMaxSourceSizeKBytes();
}
return maxSourceSizeKBytes;
}
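The contract above (0 = disabled, -1 = unlimited, otherwise a cap in KBytes) is what callers check before attempting a transformation. The helper below is illustrative only and not part of the Alfresco API; it just encodes that interpretation.
public class SourceSizeCheckSketch {
    // Illustrative only: interprets the getMaxSourceSizeKBytes() contract described above.
    static boolean isWithinLimit(long maxSourceSizeKBytes, long sourceSizeBytes) {
        if (maxSourceSizeKBytes == 0) {
            return false;                       // 0: transformation disabled
        }
        if (maxSourceSizeKBytes < 0) {
            return true;                        // -1: no limit
        }
        return sourceSizeBytes <= maxSourceSizeKBytes * 1024L; // cap expressed in KBytes
    }

    public static void main(String[] args) {
        System.out.println(isWithinLimit(0, 500));          // false: disabled
        System.out.println(isWithinLimit(-1, 5_000_000));   // true: unlimited
        System.out.println(isWithinLimit(10, 8 * 1024));    // true: 8 KB <= 10 KB
        System.out.println(isWithinLimit(10, 20 * 1024));   // false: 20 KB > 10 KB
    }
}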
use of org.alfresco.service.cmr.repository.TransformationOptionLimits in project alfresco-repository by Alfresco.
the class AbstractContentTransformerLimits method setReaderLimits.
/**
* Pass on any limits to the reader. Will only do so if the reader is an
* {@link AbstractContentReader}.
* @param reader passed to {@link #transform(ContentReader, ContentWriter, TransformationOptions)}.
* @param writer passed to {@link #transform(ContentReader, ContentWriter, TransformationOptions)}.
* @param options passed to {@link #transform(ContentReader, ContentWriter, TransformationOptions)}.
*/
protected void setReaderLimits(ContentReader reader, ContentWriter writer, TransformationOptions options) {
if (reader instanceof AbstractContentReader) {
AbstractContentReader abstractContentReader = (AbstractContentReader) reader;
TransformationOptionLimits limits = getLimits(reader, writer, options);
abstractContentReader.setLimits(limits);
abstractContentReader.setTransformerDebug(transformerDebug);
}
}
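The real AbstractContentReader applies these limits itself once they are set; the sketch below is only a rough, self-contained approximation of what a read limit means for a reader in practice. The class and its behaviour (failing once readLimitKBytes worth of bytes has been consumed) are illustrative, not the actual Alfresco implementation, and only the single-byte read path is limited here.
import java.io.ByteArrayInputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

// Illustrative only: a stream wrapper that refuses to read past a KByte limit,
// approximating the effect of a readLimitKBytes value on a content reader.
public class LimitedReadSketch extends FilterInputStream {
    private final long limitBytes;
    private long readSoFar;

    LimitedReadSketch(InputStream in, long readLimitKBytes) {
        super(in);
        this.limitBytes = readLimitKBytes * 1024L;
    }

    @Override
    public int read() throws IOException {
        int b = super.read();
        if (b >= 0 && ++readSoFar > limitBytes) {
            throw new IOException("Read limit of " + limitBytes + " bytes exceeded");
        }
        return b;
    }

    public static void main(String[] args) {
        byte[] data = new byte[4096];
        try (InputStream in = new LimitedReadSketch(new ByteArrayInputStream(data), 2)) {
            while (in.read() >= 0) {
                // consume byte by byte; fails after 2 KBytes
            }
        } catch (IOException expected) {
            System.out.println(expected.getMessage());
        }
    }
}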
use of org.alfresco.service.cmr.repository.TransformationOptionLimits in project alfresco-repository by Alfresco.
the class TextToPdfContentTransformer method transformLocal.
@Override
protected void transformLocal(ContentReader reader, ContentWriter writer, TransformationOptions options) throws Exception {
PDDocument pdf = null;
InputStream is = null;
InputStreamReader ir = null;
OutputStream os = null;
try {
is = reader.getContentInputStream();
ir = buildReader(is, reader.getEncoding(), reader.getContentUrl());
TransformationOptionLimits limits = getLimits(reader, writer, options);
TransformationOptionPair pageLimits = limits.getPagesPair();
pdf = transformer.createPDFFromText(ir, pageLimits, reader.getContentUrl(), transformerDebug);
// dump it all to the writer
os = writer.getContentOutputStream();
pdf.save(os);
} finally {
if (pdf != null) {
try {
pdf.close();
} catch (Throwable e) {
e.printStackTrace();
}
}
if (ir != null) {
try {
ir.close();
} catch (Throwable e) {
e.printStackTrace();
}
}
if (is != null) {
try {
is.close();
} catch (Throwable e) {
e.printStackTrace();
}
}
if (os != null) {
try {
os.close();
} catch (Throwable e) {
e.printStackTrace();
}
}
}
}
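The pages pair handed to createPDFFromText above is how the text-to-PDF transformer honours a pageLimit: output stops once the configured number of pages has been produced, with -1 meaning unlimited. The following self-contained sketch only illustrates that semantic in plain Java; it is not PDFBox code and uses no Alfresco API.
import java.util.List;

public class PageLimitSketch {
    // Illustrative only: keeps at most 'pageLimit' pages; -1 means no limit.
    static List<String> limitPages(List<String> pages, int pageLimit) {
        if (pageLimit < 0 || pageLimit >= pages.size()) {
            return pages;
        }
        return pages.subList(0, pageLimit);
    }

    public static void main(String[] args) {
        List<String> pages = List.of("page 1", "page 2", "page 3");
        System.out.println(limitPages(pages, -1)); // [page 1, page 2, page 3]
        System.out.println(limitPages(pages, 1));  // [page 1]
    }
}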