Use of org.pentaho.platform.api.repository2.unified.data.simple.SimpleRepositoryFileData in project pentaho-platform by pentaho.
Example from class UnifiedRepositoryToWebServiceAdapterIT, method testFileMetadata:
@Test
public void testFileMetadata() throws Exception {
// Provision a tenant with an admin user, then authenticate as that user.
login(sysAdminUserName, systemTenant, new String[] { tenantAdminRoleName, tenantAuthenticatedRoleName });
ITenant tenant = tenantManager.createTenant(systemTenant, MAIN_TENANT_1, tenantAdminRoleName, tenantAuthenticatedRoleName, ANONYMOUS_ROLE_NAME);
userRoleDao.createUser(tenant, USERNAME_ADMIN, PASSWORD, "", new String[] { tenantAdminRoleName });
login(USERNAME_ADMIN, tenant, new String[] { tenantAuthenticatedRoleName });
// Create a small text file under the tenant's public folder.
RepositoryFile publicFolder = repo.getFile(ClientRepositoryPaths.getPublicFolderPath());
final RepositoryFile createdFile = repo.createFile(publicFolder.getId(), new RepositoryFile.Builder("testfile").build(), new SimpleRepositoryFileData(new ByteArrayInputStream("test".getBytes()), "UTF-8", "text/plain"), null);
// CHECKSTYLE IGNORE AvoidNestedBlocks FOR NEXT 3 LINES
{
// Sanity check: a freshly created file starts with exactly one metadata entry.
assertNotNull(createdFile);
assertNotNull(createdFile.getId());
final Map<String, Serializable> initialMetadata = repo.getFileMetadata(createdFile.getId());
assertNotNull(initialMetadata);
assertEquals(1, initialMetadata.size());
}
// Push two metadata entries through the web-service adapter.
final Map<String, Serializable> newMetadata = new HashMap<String, Serializable>();
newMetadata.put("sample key", "sample value");
newMetadata.put("complex key?", "\"an even more 'complex' value\"! {and them some}");
adapter.setFileMetadata(createdFile.getId(), newMetadata);
// CHECKSTYLE IGNORE AvoidNestedBlocks FOR NEXT 3 LINES
{
// The repository itself should now report both entries.
assertNotNull(createdFile);
assertNotNull(createdFile.getId());
final Map<String, Serializable> repoMetadata = repo.getFileMetadata(createdFile.getId());
assertNotNull(repoMetadata);
assertEquals(2, repoMetadata.size());
}
// CHECKSTYLE IGNORE AvoidNestedBlocks FOR NEXT 3 LINES
{
// Round-trip: the same values must come back through the web service.
final Map<String, Serializable> wsMetadata = adapter.getFileMetadata(createdFile.getId());
assertNotNull(wsMetadata);
assertEquals(2, wsMetadata.size());
assertTrue(StringUtils.equals("sample value", (String) wsMetadata.get("sample key")));
assertTrue(StringUtils.equals("\"an even more 'complex' value\"! {and them some}", (String) wsMetadata.get("complex key?")));
}
cleanupUserAndRoles(tenant);
}
Use of org.pentaho.platform.api.repository2.unified.data.simple.SimpleRepositoryFileData in project pentaho-kettle by pentaho.
Example from class StreamToJobNodeConverter, method convert:
/**
 * Converts the repository file identified by {@code fileId} into a stream of job XML.
 * Shared (non-private) database connections are stripped from the job before
 * serialization. If the file cannot be loaded as a job (e.g. it is a legacy CE
 * artifact), the raw file data is returned instead.
 *
 * @param fileId id of the repository file to convert; may be {@code null}
 * @return an input stream over the job XML, the raw legacy file data, or
 *         {@code null} when the id is null, the file is missing, or conversion fails
 */
public InputStream convert(final Serializable fileId) {
  try {
    if (fileId != null) {
      Repository repository = connectToRepository();
      RepositoryFile file = unifiedRepository.getFileById(fileId);
      if (file != null) {
        try {
          JobMeta jobMeta = repository.loadJob(new StringObjectId(fileId.toString()), null);
          if (jobMeta != null) {
            Set<String> privateDatabases = jobMeta.getPrivateDatabases();
            if (privateDatabases != null) {
              // keep only private job databases; drop shared connections
              for (Iterator<DatabaseMeta> it = jobMeta.getDatabases().iterator(); it.hasNext(); ) {
                String databaseName = it.next().getName();
                if (!privateDatabases.contains(databaseName)) {
                  it.remove();
                }
              }
            }
            // Encode with an explicit charset so the bytes match the XML declaration
            // instead of depending on the platform default encoding.
            return new ByteArrayInputStream(jobMeta.getXML().getBytes("UTF-8"));
          }
        } catch (KettleException e) {
          logger.error(e);
          // file is there and may be legacy, attempt simple export
          SimpleRepositoryFileData fileData = unifiedRepository.getDataForRead(fileId, SimpleRepositoryFileData.class);
          if (fileData != null) {
            logger.warn("Reading as legacy CE job " + file.getName() + ".");
            return fileData.getInputStream();
          }
        }
      }
    }
  } catch (Exception e) {
    // UnsupportedEncodingException and any repository failure land here.
    logger.error(e);
  }
  return null;
}
Use of org.pentaho.platform.api.repository2.unified.data.simple.SimpleRepositoryFileData in project pentaho-kettle by pentaho.
Example from class StreamToTransNodeConverter, method convert:
/**
 * Converts the repository file identified by {@code fileId} into a stream of
 * transformation XML. Shared (non-private) database connections are stripped
 * before serialization. If the file cannot be loaded as a transformation
 * (e.g. it is a legacy CE artifact), the raw file data is returned instead.
 *
 * @param fileId id of the repository file to convert; may be {@code null}
 * @return an input stream over the transformation XML, the raw legacy file data,
 *         or {@code null} when the id is null, the file is missing, or conversion fails
 */
public InputStream convert(final Serializable fileId) {
  try {
    // own repository. For now, get the reference
    if (fileId != null) {
      Repository repository = connectToRepository();
      RepositoryFile file = unifiedRepository.getFileById(fileId);
      if (file != null) {
        try {
          TransMeta transMeta = repository.loadTransformation(new StringObjectId(fileId.toString()), null);
          if (transMeta != null) {
            Set<String> privateDatabases = transMeta.getPrivateDatabases();
            if (privateDatabases != null) {
              // keep only private transformation databases; drop shared connections
              for (Iterator<DatabaseMeta> it = transMeta.getDatabases().iterator(); it.hasNext(); ) {
                String databaseName = it.next().getName();
                if (!privateDatabases.contains(databaseName)) {
                  it.remove();
                }
              }
            }
            // Encode with an explicit charset so the bytes match the XML declaration
            // instead of depending on the platform default encoding.
            return new ByteArrayInputStream(transMeta.getXML().getBytes("UTF-8"));
          }
        } catch (KettleException e) {
          logger.error(e);
          // file is there and may be legacy, attempt simple export
          SimpleRepositoryFileData fileData = unifiedRepository.getDataForRead(fileId, SimpleRepositoryFileData.class);
          if (fileData != null) {
            // fixed typo: "tranformation" -> "transformation"
            logger.warn("Reading as legacy CE transformation " + file.getName() + ".");
            return fileData.getInputStream();
          }
        }
      }
    }
  } catch (Exception e) {
    // UnsupportedEncodingException and any repository failure land here.
    logger.error(e);
  }
  return null;
}
Use of org.pentaho.platform.api.repository2.unified.data.simple.SimpleRepositoryFileData in project pentaho-platform by pentaho.
Example from class Exporter, method exportFileAsZip:
/**
 * Writes one repository file into the zip stream as a single entry, named by
 * the file's path relative to the export root ({@code filePath}).
 *
 * @param exportRepositoryFile the repository file whose data is appended
 * @param zos the zip output stream to write the entry to
 * @throws IOException if reading the file data or writing the zip entry fails
 */
private void exportFileAsZip(RepositoryFile exportRepositoryFile, ZipOutputStream zos) throws IOException {
  // Entry name: file path with the export-root prefix and its separator removed.
  String entryName = exportRepositoryFile.getPath().substring(filePath.length() + 1);
  zos.putNextEntry(new ZipEntry(entryName));
  SimpleRepositoryFileData repoFileData = unifiedRepository.getDataForRead(exportRepositoryFile.getId(), SimpleRepositoryFileData.class);
  // try-with-resources closes the data stream on every path, as the original finally did.
  try (InputStream dataStream = repoFileData.getStream()) {
    IOUtils.copy(dataStream, zos);
    zos.closeEntry();
  }
}
Use of org.pentaho.platform.api.repository2.unified.data.simple.SimpleRepositoryFileData in project pentaho-platform by pentaho.
Example from class Exporter, method exportFile:
/**
 * Exports a single repository file to the given local directory, creating the
 * directory if needed. The destination file is named after the repository file.
 *
 * @param exportRepositoryFile the repository file to export; must not be {@code null}
 * @param exportDirectory local directory to write into; created if absent
 * @throws FileNotFoundException if {@code exportRepositoryFile} is {@code null}
 * @throws IllegalArgumentException if {@code exportDirectory} exists but is not a directory
 * @throws java.io.IOException if the directory cannot be created or copying fails
 */
public void exportFile(RepositoryFile exportRepositoryFile, File exportDirectory) throws IOException {
  if (exportRepositoryFile == null) {
    throw new FileNotFoundException(Messages.getInstance().getErrorString("Exporter.ERROR_0001_INVALID_SOURCE_DIRECTORY", repoPath));
  }
  if (exportDirectory.exists()) {
    if (!exportDirectory.isDirectory()) {
      throw new IllegalArgumentException(Messages.getInstance().getErrorString("Exporter.ERROR_0004_INVALID_DESTINATION_DIRECTORY", exportDirectory.getAbsolutePath()));
    }
  } else {
    // Directory doesn't exist so create it
    if (!exportDirectory.mkdirs()) {
      throw new IOException();
    }
  }
  SimpleRepositoryFileData repoFileData = unifiedRepository.getDataForRead(exportRepositoryFile.getId(), SimpleRepositoryFileData.class);
  File exportFile = new File(exportDirectory.getAbsolutePath() + File.separator + exportRepositoryFile.getName());
  // Nested try/finally fixes two leaks in the original: the input stream leaked
  // when the FileOutputStream constructor threw, and it was never closed when
  // os.close() itself threw in the shared finally block.
  InputStream is = repoFileData.getStream();
  try {
    OutputStream os = new FileOutputStream(exportFile);
    try {
      IOUtils.copy(is, os);
    } finally {
      os.close();
    }
  } finally {
    is.close();
  }
}
Aggregations