Use of org.apache.poi.hslf.record.Record in project poi by apache.
The class HSLFSlideShowEncrypted, method addEncryptionRecord:
protected static Record[] addEncryptionRecord(Record[] records, DocumentEncryptionAtom dea) {
    assert (dea != null);
    int ueaIdx = -1, ptrIdx = -1, deaIdx = -1, idx = -1;
    for (Record r : records) {
        idx++;
        if (r instanceof UserEditAtom) {
            ueaIdx = idx;
        } else if (r instanceof PersistPtrHolder) {
            ptrIdx = idx;
        } else if (r instanceof DocumentEncryptionAtom) {
            deaIdx = idx;
        }
    }
    assert (ueaIdx != -1 && ptrIdx != -1 && ptrIdx < ueaIdx);
    if (deaIdx != -1) {
        // an encryption atom already exists - replace it in place, keeping its offset
        DocumentEncryptionAtom deaOld = (DocumentEncryptionAtom) records[deaIdx];
        dea.setLastOnDiskOffset(deaOld.getLastOnDiskOffset());
        records[deaIdx] = dea;
        return records;
    } else {
        // no encryption atom yet - register it as a new persist entry and
        // insert it directly before the PersistPtrHolder
        PersistPtrHolder ptr = (PersistPtrHolder) records[ptrIdx];
        UserEditAtom uea = ((UserEditAtom) records[ueaIdx]);
        dea.setLastOnDiskOffset(ptr.getLastOnDiskOffset() - 1);
        int nextSlideId = uea.getMaxPersistWritten() + 1;
        ptr.addSlideLookup(nextSlideId, ptr.getLastOnDiskOffset() - 1);
        uea.setEncryptSessionPersistIdRef(nextSlideId);
        uea.setMaxPersistWritten(nextSlideId);
        Record[] newRecords = new Record[records.length + 1];
        if (ptrIdx > 0) {
            System.arraycopy(records, 0, newRecords, 0, ptrIdx);
        }
        if (ptrIdx < records.length - 1) {
            System.arraycopy(records, ptrIdx, newRecords, ptrIdx + 1, records.length - ptrIdx);
        }
        newRecords[ptrIdx] = dea;
        return newRecords;
    }
}
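The interesting detail in the else branch is the manual array insertion: the new DocumentEncryptionAtom ends up directly in front of the PersistPtrHolder by copying the two surrounding slices with System.arraycopy. A minimal, self-contained sketch of that idiom, using plain Strings instead of POI records (the class and method names here are made up for illustration):

import java.util.Arrays;

public class ArrayInsertDemo {
    // inserts element at position idx, shifting idx..end one slot to the right
    static String[] insertAt(String[] src, int idx, String element) {
        String[] dst = new String[src.length + 1];
        if (idx > 0) {
            System.arraycopy(src, 0, dst, 0, idx); // copy the prefix [0, idx)
        }
        if (idx < src.length) {
            System.arraycopy(src, idx, dst, idx + 1, src.length - idx); // shift the tail right
        }
        dst[idx] = element;
        return dst;
    }

    public static void main(String[] args) {
        String[] records = { "Document", "PersistPtrHolder", "UserEditAtom" };
        // mirrors the POI code above: the new atom lands directly before the PersistPtrHolder
        System.out.println(Arrays.toString(insertAt(records, 1, "DocumentEncryptionAtom")));
        // -> [Document, DocumentEncryptionAtom, PersistPtrHolder, UserEditAtom]
    }
}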
Use of org.apache.poi.hslf.record.Record in project poi by apache.
The class HSLFSlideShowEncrypted, method normalizeRecords:
/**
 * Removes duplicated UserEditAtoms and merges the PersistPtrHolders.
 * Before this method is called, make sure that the offsets are correct,
 * i.e. call {@link HSLFSlideShowImpl#updateAndWriteDependantRecords(OutputStream, Map)}
 */
protected static Record[] normalizeRecords(Record[] records) {
    // http://msdn.microsoft.com/en-us/library/office/gg615594(v=office.14).aspx
    // repeated slideIds can be overwritten, i.e. ignored
    UserEditAtom uea = null;
    PersistPtrHolder pph = null;
    TreeMap<Integer, Integer> slideLocations = new TreeMap<Integer, Integer>();
    TreeMap<Integer, Record> recordMap = new TreeMap<Integer, Record>();
    List<Integer> obsoleteOffsets = new ArrayList<Integer>();
    int duplicatedCount = 0;
    for (Record r : records) {
        assert (r instanceof PositionDependentRecord);
        PositionDependentRecord pdr = (PositionDependentRecord) r;
        if (pdr instanceof UserEditAtom) {
            uea = (UserEditAtom) pdr;
            continue;
        }
        if (pdr instanceof PersistPtrHolder) {
            if (pph != null) {
                duplicatedCount++;
            }
            pph = (PersistPtrHolder) pdr;
            for (Map.Entry<Integer, Integer> me : pph.getSlideLocationsLookup().entrySet()) {
                Integer oldOffset = slideLocations.put(me.getKey(), me.getValue());
                if (oldOffset != null) {
                    obsoleteOffsets.add(oldOffset);
                }
            }
            continue;
        }
        recordMap.put(pdr.getLastOnDiskOffset(), r);
    }
    if (uea == null || pph == null || uea.getPersistPointersOffset() != pph.getLastOnDiskOffset()) {
        throw new EncryptedDocumentException("UserEditAtom and PersistPtrHolder must exist and their offset need to match.");
    }
    recordMap.put(pph.getLastOnDiskOffset(), pph);
    recordMap.put(uea.getLastOnDiskOffset(), uea);
    if (duplicatedCount == 0 && obsoleteOffsets.isEmpty()) {
        return records;
    }
    uea.setLastUserEditAtomOffset(0);
    pph.clear();
    for (Map.Entry<Integer, Integer> me : slideLocations.entrySet()) {
        pph.addSlideLookup(me.getKey(), me.getValue());
    }
    for (Integer oldOffset : obsoleteOffsets) {
        recordMap.remove(oldOffset);
    }
    return recordMap.values().toArray(new Record[recordMap.size()]);
}
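The merge relies on Map.put returning the value it replaces: when a later PersistPtrHolder redefines a slide id, the earlier on-disk offset comes back from put and is remembered as obsolete, so the stale record can afterwards be dropped from the offset-keyed recordMap. A stripped-down sketch of that pattern with plain integers (the slide ids and offsets are invented):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class MergeLookupDemo {
    public static void main(String[] args) {
        // two simulated PersistPtrHolder lookups (slide id -> on-disk offset), in file order;
        // the second holder rewrites slide 2 at a new offset
        Map<Integer, Integer> first = new TreeMap<Integer, Integer>();
        first.put(1, 100);
        first.put(2, 200);
        Map<Integer, Integer> second = new TreeMap<Integer, Integer>();
        second.put(2, 250);
        second.put(3, 300);

        TreeMap<Integer, Integer> slideLocations = new TreeMap<Integer, Integer>();
        List<Integer> obsoleteOffsets = new ArrayList<Integer>();
        for (Map<Integer, Integer> lookup : Arrays.asList(first, second)) {
            for (Map.Entry<Integer, Integer> me : lookup.entrySet()) {
                Integer oldOffset = slideLocations.put(me.getKey(), me.getValue());
                if (oldOffset != null) {
                    obsoleteOffsets.add(oldOffset); // the earlier copy of that slide is now dead
                }
            }
        }
        System.out.println("merged:   " + slideLocations);  // {1=100, 2=250, 3=300}
        System.out.println("obsolete: " + obsoleteOffsets); // [200]
    }
}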
Use of org.apache.poi.hslf.record.Record in project poi by apache.
The class HSLFSlideShowImpl, method updateAndWriteDependantRecords:
/**
 * This is a helper function, which is needed for adding new position dependent records
 * or for finally writing the slideshow to a file.
 *
 * @param os the stream to write to; if null, only the references are updated
 * @param interestingRecords a map of interesting records (PersistPtrHolder and UserEditAtom)
 *        referenced by their RecordType. Only the very last of each type will be saved to the map.
 *        May be null, if not needed.
 * @throws IOException
 */
public void updateAndWriteDependantRecords(OutputStream os, Map<RecordTypes, PositionDependentRecord> interestingRecords) throws IOException {
    // For position dependent records, hold where they were and now are
    // As we go along, update, and hand over, to any Position Dependent
    // records we happen across
    Map<Integer, Integer> oldToNewPositions = new HashMap<Integer, Integer>();
    // First pass - figure out where all the position dependent
    // records are going to end up, in the new scheme
    // (Annoyingly, some powerpoint files have PersistPtrHolders
    // that reference slides after the PersistPtrHolder)
    UserEditAtom usr = null;
    PersistPtrHolder ptr = null;
    CountingOS cos = new CountingOS();
    for (Record record : _records) {
        // all top level records are position dependent
        assert (record instanceof PositionDependentRecord);
        PositionDependentRecord pdr = (PositionDependentRecord) record;
        int oldPos = pdr.getLastOnDiskOffset();
        int newPos = cos.size();
        pdr.setLastOnDiskOffset(newPos);
        if (oldPos != UNSET_OFFSET) {
            // new records don't need a mapping, as they aren't in a relation yet
            oldToNewPositions.put(oldPos, newPos);
        }
        // Grab interesting records as they come past
        // this will only save the very last record of each type
        RecordTypes saveme = null;
        int recordType = (int) record.getRecordType();
        if (recordType == RecordTypes.PersistPtrIncrementalBlock.typeID) {
            saveme = RecordTypes.PersistPtrIncrementalBlock;
            ptr = (PersistPtrHolder) pdr;
        } else if (recordType == RecordTypes.UserEditAtom.typeID) {
            saveme = RecordTypes.UserEditAtom;
            usr = (UserEditAtom) pdr;
        }
        if (interestingRecords != null && saveme != null) {
            interestingRecords.put(saveme, pdr);
        }
        // Dummy write out, so the position winds on properly
        record.writeOut(cos);
    }
    cos.close();
    if (usr == null || ptr == null) {
        throw new HSLFException("UserEditAtom or PersistPtr can't be determined.");
    }
    Map<Integer, Integer> persistIds = new HashMap<Integer, Integer>();
    for (Map.Entry<Integer, Integer> entry : ptr.getSlideLocationsLookup().entrySet()) {
        persistIds.put(oldToNewPositions.get(entry.getValue()), entry.getKey());
    }
    HSLFSlideShowEncrypted encData = new HSLFSlideShowEncrypted(getDocumentEncryptionAtom());
    for (Record record : _records) {
        assert (record instanceof PositionDependentRecord);
        // We've already figured out their new location, and
        // told them that
        // Tell them of the positions of the other records though
        PositionDependentRecord pdr = (PositionDependentRecord) record;
        Integer persistId = persistIds.get(pdr.getLastOnDiskOffset());
        if (persistId == null) {
            persistId = 0;
        }
        // For now, we're only handling PositionDependentRecords that
        // happen at the top level.
        // In future, we'll need to handle them everywhere, but that's
        // a bit trickier
        pdr.updateOtherRecordReferences(oldToNewPositions);
        // Whatever happens, write out that record tree
        if (os != null) {
            record.writeOut(encData.encryptRecord(os, persistId, record));
        }
    }
    encData.close();
    // Update and write out the Current User atom
    int oldLastUserEditAtomPos = (int) currentUser.getCurrentEditOffset();
    Integer newLastUserEditAtomPos = oldToNewPositions.get(oldLastUserEditAtomPos);
    if (newLastUserEditAtomPos == null || usr.getLastOnDiskOffset() != newLastUserEditAtomPos) {
        throw new HSLFException("Couldn't find the new location of the last UserEditAtom that used to be at " + oldLastUserEditAtomPos);
    }
    currentUser.setCurrentEditOffset(usr.getLastOnDiskOffset());
}
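The first pass depends on CountingOS, a sink that discards the bytes and only remembers how many were written, so that record.writeOut() can be used to measure each record's new on-disk position without producing any output. POI's CountingOS is a private helper of HSLFSlideShowImpl; a minimal stand-in presumably looks like this (the class name below is mine):

import java.io.IOException;
import java.io.OutputStream;

// discards all output and only counts the bytes written
class ByteCountingOutputStream extends OutputStream {
    private int count;

    @Override
    public void write(int b) throws IOException {
        count++;
    }

    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        count += len;
    }

    // number of bytes "written" so far, i.e. the offset the next record would start at
    public int size() {
        return count;
    }
}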
Use of org.apache.poi.hslf.record.Record in project poi by apache.
The class HSLFTextParagraph, method updateHyperlinks:
private static void updateHyperlinks(List<HSLFTextParagraph> paragraphs) {
    TextHeaderAtom headerAtom = paragraphs.get(0)._headerAtom;
    RecordContainer _txtbox = headerAtom.getParentRecord();
    // remove existing hyperlink records
    for (Record r : _txtbox.getChildRecords()) {
        if (r instanceof InteractiveInfo || r instanceof TxInteractiveInfoAtom) {
            _txtbox.removeChild(r);
        }
    }
    // now go through all the textruns and check for hyperlinks
    HSLFHyperlink lastLink = null;
    for (HSLFTextParagraph para : paragraphs) {
        for (HSLFTextRun run : para) {
            HSLFHyperlink thisLink = run.getHyperlink();
            if (thisLink != null && thisLink == lastLink) {
                // the hyperlink extends over this text run, increase its length
                // TODO: the text run might be longer than the hyperlink
                thisLink.setEndIndex(thisLink.getEndIndex() + run.getLength());
            } else {
                if (lastLink != null) {
                    InteractiveInfo info = lastLink.getInfo();
                    TxInteractiveInfoAtom txinfo = lastLink.getTextRunInfo();
                    assert (info != null && txinfo != null);
                    _txtbox.appendChildRecord(info);
                    _txtbox.appendChildRecord(txinfo);
                }
            }
            lastLink = thisLink;
        }
    }
    if (lastLink != null) {
        InteractiveInfo info = lastLink.getInfo();
        TxInteractiveInfoAtom txinfo = lastLink.getTextRunInfo();
        assert (info != null && txinfo != null);
        _txtbox.appendChildRecord(info);
        _txtbox.appendChildRecord(txinfo);
    }
}
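The run loop is a span-merging pass: as long as consecutive runs point at the very same HSLFHyperlink object, the link's end index is stretched; once the link changes, the finished InteractiveInfo/TxInteractiveInfoAtom pair is appended to the text box. The same control flow reduced to plain arrays (the run lengths and URLs are invented, and string equality stands in for the object-identity check on the hyperlink):

public class LinkRangeDemo {
    public static void main(String[] args) {
        // simulated text runs: the length of each run and the hyperlink it belongs to (null = no link)
        int[] runLength = { 5, 4, 6, 3 };
        String[] runLink = { null, "http://poi.apache.org", "http://poi.apache.org", null };

        String lastLink = null;
        int pos = 0, start = 0, end = 0;
        for (int i = 0; i < runLength.length; i++) {
            if (runLink[i] != null && runLink[i].equals(lastLink)) {
                end += runLength[i]; // the link continues over this run: extend it
            } else {
                if (lastLink != null) {
                    System.out.println(lastLink + " covers [" + start + ", " + end + ")");
                }
                start = pos;
                end = pos + runLength[i];
            }
            lastLink = runLink[i];
            pos += runLength[i];
        }
        if (lastLink != null) {
            System.out.println(lastLink + " covers [" + start + ", " + end + ")");
        }
        // prints: http://poi.apache.org covers [5, 15)
    }
}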
Use of org.apache.poi.hslf.record.Record in project poi by apache.
The class TextStyleListing, method main:
public static void main(String[] args) throws IOException {
    if (args.length < 1) {
        System.err.println("Need to give a filename");
        System.exit(1);
    }
    HSLFSlideShowImpl ss = new HSLFSlideShowImpl(args[0]);
    // Find the documents, and then their SLWT
    Record[] records = ss.getRecords();
    for (int i = 0; i < records.length; i++) {
        if (records[i].getRecordType() == 1000l) {
            // 1000 is the type id of the Document container record
            Record docRecord = records[i];
            Record[] docChildren = docRecord.getChildRecords();
            for (int j = 0; j < docChildren.length; j++) {
                if (docChildren[j] instanceof SlideListWithText) {
                    Record[] slwtChildren = docChildren[j].getChildRecords();
                    int lastTextLen = -1;
                    for (int k = 0; k < slwtChildren.length; k++) {
                        if (slwtChildren[k] instanceof TextCharsAtom) {
                            lastTextLen = ((TextCharsAtom) slwtChildren[k]).getText().length();
                        }
                        if (slwtChildren[k] instanceof TextBytesAtom) {
                            lastTextLen = ((TextBytesAtom) slwtChildren[k]).getText().length();
                        }
                        if (slwtChildren[k] instanceof StyleTextPropAtom) {
                            StyleTextPropAtom stpa = (StyleTextPropAtom) slwtChildren[k];
                            // a StyleTextPropAtom can only be parsed once the length
                            // of the preceding text atom is known
                            stpa.setParentTextSize(lastTextLen);
                            showStyleTextPropAtom(stpa);
                        }
                    }
                }
            }
        }
    }
    ss.close();
}
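The listing walks one fixed path (Document -> SlideListWithText -> text atoms), and the magic number 1000l is the type id of the Document container. The same Record API (getRecordType(), getChildRecords()) also supports a generic tree dump when exploring an unknown file; a small sketch, assuming HSLFSlideShowImpl lives in org.apache.poi.hslf.usermodel in this POI version (the class name and indentation scheme are mine):

import java.io.IOException;
import org.apache.poi.hslf.record.Record;
import org.apache.poi.hslf.usermodel.HSLFSlideShowImpl;

public class RecordTreeDump {
    public static void main(String[] args) throws IOException {
        HSLFSlideShowImpl ss = new HSLFSlideShowImpl(args[0]); // e.g. "presentation.ppt"
        try {
            for (Record r : ss.getRecords()) {
                dump(r, 0);
            }
        } finally {
            ss.close();
        }
    }

    // prints a record and all of its children, indented by nesting depth
    private static void dump(Record r, int depth) {
        for (int i = 0; i < depth; i++) {
            System.out.print("  ");
        }
        System.out.println(r.getClass().getSimpleName() + " (type " + r.getRecordType() + ")");
        Record[] children = r.getChildRecords();
        if (children != null) {
            for (Record child : children) {
                dump(child, depth + 1);
            }
        }
    }
}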