Use of com.google.common.collect.ImmutableMultimap in project buck by facebook.
The class SmartDexingStep, method execute().
@Override
public StepExecutionResult execute(ExecutionContext context) throws InterruptedException {
  try {
    Multimap<Path, Path> outputToInputs = outputToInputsSupplier.get();
    runDxCommands(context, outputToInputs);
    if (secondaryOutputDir.isPresent()) {
      removeExtraneousSecondaryArtifacts(
          secondaryOutputDir.get(), outputToInputs.keySet(), filesystem);
      // Concatenate if solid compression is specified.
      // Create a mapping of the xzs file target and the dex.jar files that go into it.
      ImmutableMultimap.Builder<Path, Path> secondaryDexJarsMultimapBuilder =
          ImmutableMultimap.builder();
      for (Path p : outputToInputs.keySet()) {
        if (DexStore.XZS.matchesPath(p)) {
          String[] matches = p.getFileName().toString().split("-");
          Path output = p.getParent().resolve(matches[0].concat(SECONDARY_SOLID_DEX_EXTENSION));
          secondaryDexJarsMultimapBuilder.put(output, p);
        }
      }
      ImmutableMultimap<Path, Path> secondaryDexJarsMultimap =
          secondaryDexJarsMultimapBuilder.build();
      if (!secondaryDexJarsMultimap.isEmpty()) {
        for (Map.Entry<Path, Collection<Path>> entry : secondaryDexJarsMultimap.asMap().entrySet()) {
          Path store = entry.getKey();
          Collection<Path> secondaryDexJars = entry.getValue();
          // Construct the output path for our solid blob and its compressed form.
          Path secondaryBlobOutput = store.getParent().resolve("uncompressed.dex.blob");
          Path secondaryCompressedBlobOutput = store;
          // Concatenate the jars into a blob and compress it.
          StepRunner stepRunner = new DefaultStepRunner();
          Step concatStep =
              new ConcatStep(filesystem, ImmutableList.copyOf(secondaryDexJars), secondaryBlobOutput);
          Step xzStep;
          if (xzCompressionLevel.isPresent()) {
            xzStep =
                new XzStep(
                    filesystem,
                    secondaryBlobOutput,
                    secondaryCompressedBlobOutput,
                    xzCompressionLevel.get().intValue());
          } else {
            xzStep = new XzStep(filesystem, secondaryBlobOutput, secondaryCompressedBlobOutput);
          }
          stepRunner.runStepForBuildTarget(context, concatStep, Optional.empty());
          stepRunner.runStepForBuildTarget(context, xzStep, Optional.empty());
        }
      }
    }
  } catch (StepFailedException | IOException e) {
    context.logError(e, "There was an error in smart dexing step.");
    return StepExecutionResult.ERROR;
  }
  return StepExecutionResult.SUCCESS;
}
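The ImmutableMultimap idiom to note here is grouping many dex jars under one output key with a builder and then walking the result through asMap(), so each store is visited exactly once with all of its inputs. Below is a minimal, self-contained sketch of that grouping step; the paths and the "prefix before the dash" naming convention are hypothetical stand-ins for Buck's real dex-store layout, not its actual constants.
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMultimap;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.Map;

public class GroupDexJarsExample {
  public static void main(String[] args) {
    // Hypothetical secondary dex jars; the prefix before "-" decides which store they join.
    ImmutableList<Path> dexJars = ImmutableList.of(
        Paths.get("secondary/store1-1.dex.jar"),
        Paths.get("secondary/store1-2.dex.jar"),
        Paths.get("secondary/store2-1.dex.jar"));

    ImmutableMultimap.Builder<Path, Path> builder = ImmutableMultimap.builder();
    for (Path jar : dexJars) {
      String prefix = jar.getFileName().toString().split("-")[0];
      Path store = jar.getParent().resolve(prefix + ".dex.jar.xzs");
      builder.put(store, jar);
    }
    ImmutableMultimap<Path, Path> storeToJars = builder.build();

    // asMap() yields each store once, with every jar that should be concatenated into it.
    for (Map.Entry<Path, Collection<Path>> entry : storeToJars.asMap().entrySet()) {
      System.out.println(entry.getKey() + " <- " + entry.getValue());
    }
  }
}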
Use of com.google.common.collect.ImmutableMultimap in project bazel by bazelbuild.
The class FdoSupport, method readAutoFdoImports().
/**
* Reads a .afdo.imports file and stores the imports information.
*/
private static ImmutableMultimap<PathFragment, PathFragment> readAutoFdoImports(Path importsFile)
    throws IOException, FdoException {
  ImmutableMultimap.Builder<PathFragment, PathFragment> importBuilder = ImmutableMultimap.builder();
  for (String line : FileSystemUtils.iterateLinesAsLatin1(importsFile)) {
    line = line.trim();
    if (line.isEmpty()) {
      continue;
    }
    int colonIndex = line.indexOf(':');
    if (colonIndex < 0) {
      continue;
    }
    PathFragment key = new PathFragment(line.substring(0, colonIndex));
    if (key.isAbsolute()) {
      throw new FdoException(
          "Absolute paths not allowed in afdo imports file " + importsFile + ": " + key);
    }
    for (String auxFile : line.substring(colonIndex + 1).split(" ")) {
      if (auxFile.length() == 0) {
        continue;
      }
      importBuilder.put(key, new PathFragment(auxFile));
    }
  }
  return importBuilder.build();
}
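The pattern here is line-by-line parsing of "key: value value ..." records, where repeated put() calls on the same key simply accumulate values in the builder. A self-contained sketch of the same parsing shape, with plain strings standing in for Bazel's PathFragment and with an in-memory list instead of FileSystemUtils:
import com.google.common.collect.ImmutableMultimap;
import java.util.List;

public class ImportsParserExample {
  /** Parses lines of the form "key: value1 value2" into a key-to-values multimap. */
  static ImmutableMultimap<String, String> parseImports(List<String> lines) {
    ImmutableMultimap.Builder<String, String> builder = ImmutableMultimap.builder();
    for (String line : lines) {
      line = line.trim();
      int colonIndex = line.indexOf(':');
      if (line.isEmpty() || colonIndex < 0) {
        continue; // skip blank or malformed lines
      }
      String key = line.substring(0, colonIndex);
      for (String value : line.substring(colonIndex + 1).split(" ")) {
        if (!value.isEmpty()) {
          builder.put(key, value);
        }
      }
    }
    return builder.build();
  }

  public static void main(String[] args) {
    ImmutableMultimap<String, String> imports =
        parseImports(List.of("foo/bar.cc: a.h b.h", "baz.cc: c.h"));
    System.out.println(imports); // {foo/bar.cc=[a.h, b.h], baz.cc=[c.h]}
  }
}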
Use of com.google.common.collect.ImmutableMultimap in project presto by prestodb.
The class AbstractTestHiveClient, method doTestMismatchSchemaTable().
protected void doTestMismatchSchemaTable(
    SchemaTableName schemaTableName,
    HiveStorageFormat storageFormat,
    List<ColumnMetadata> tableBefore,
    MaterializedResult dataBefore,
    List<ColumnMetadata> tableAfter,
    MaterializedResult dataAfter)
    throws Exception {
  String schemaName = schemaTableName.getSchemaName();
  String tableName = schemaTableName.getTableName();
  doCreateEmptyTable(schemaTableName, storageFormat, tableBefore);
  // insert the data
  try (Transaction transaction = newTransaction()) {
    ConnectorSession session = newSession();
    ConnectorMetadata metadata = transaction.getMetadata();
    ConnectorTableHandle tableHandle = getTableHandle(metadata, schemaTableName);
    ConnectorInsertTableHandle insertTableHandle = metadata.beginInsert(session, tableHandle);
    ConnectorPageSink sink =
        pageSinkProvider.createPageSink(transaction.getTransactionHandle(), session, insertTableHandle);
    sink.appendPage(dataBefore.toPage());
    Collection<Slice> fragments = getFutureValue(sink.finish());
    metadata.finishInsert(session, insertTableHandle, fragments);
    transaction.commit();
  }
  // load the table and verify the data
  try (Transaction transaction = newTransaction()) {
    ConnectorSession session = newSession();
    ConnectorMetadata metadata = transaction.getMetadata();
    ConnectorTableHandle tableHandle = getTableHandle(metadata, schemaTableName);
    List<ColumnHandle> columnHandles =
        metadata.getColumnHandles(session, tableHandle).values().stream()
            .filter(columnHandle -> !((HiveColumnHandle) columnHandle).isHidden())
            .collect(toList());
    MaterializedResult result =
        readTable(transaction, tableHandle, columnHandles, session, TupleDomain.all(), OptionalInt.empty(), Optional.empty());
    assertEqualsIgnoreOrder(result.getMaterializedRows(), dataBefore.getMaterializedRows());
    transaction.commit();
  }
  // alter the table schema
  try (Transaction transaction = newTransaction()) {
    ConnectorSession session = newSession();
    PrincipalPrivileges principalPrivileges =
        new PrincipalPrivileges(
            ImmutableMultimap.<String, HivePrivilegeInfo>builder()
                .put(session.getUser(), new HivePrivilegeInfo(HivePrivilege.SELECT, true))
                .put(session.getUser(), new HivePrivilegeInfo(HivePrivilege.INSERT, true))
                .put(session.getUser(), new HivePrivilegeInfo(HivePrivilege.UPDATE, true))
                .put(session.getUser(), new HivePrivilegeInfo(HivePrivilege.DELETE, true))
                .build(),
            ImmutableMultimap.of());
    Table oldTable = transaction.getMetastore(schemaName).getTable(schemaName, tableName).get();
    HiveTypeTranslator hiveTypeTranslator = new HiveTypeTranslator();
    List<Column> dataColumns =
        tableAfter.stream()
            .filter(columnMetadata -> !columnMetadata.getName().equals("ds"))
            .map(columnMetadata -> new Column(
                columnMetadata.getName(),
                toHiveType(hiveTypeTranslator, columnMetadata.getType()),
                Optional.empty()))
            .collect(toList());
    Table.Builder newTable = Table.builder(oldTable).setDataColumns(dataColumns);
    transaction.getMetastore(schemaName).replaceView(schemaName, tableName, newTable.build(), principalPrivileges);
    transaction.commit();
  }
  // load the altered table and verify the data
  try (Transaction transaction = newTransaction()) {
    ConnectorSession session = newSession();
    ConnectorMetadata metadata = transaction.getMetadata();
    ConnectorTableHandle tableHandle = getTableHandle(metadata, schemaTableName);
    List<ColumnHandle> columnHandles =
        metadata.getColumnHandles(session, tableHandle).values().stream()
            .filter(columnHandle -> !((HiveColumnHandle) columnHandle).isHidden())
            .collect(toList());
    MaterializedResult result =
        readTable(transaction, tableHandle, columnHandles, session, TupleDomain.all(), OptionalInt.empty(), Optional.empty());
    assertEqualsIgnoreOrder(result.getMaterializedRows(), dataAfter.getMaterializedRows());
    transaction.commit();
  }
  // insertions to the partitions with type mismatches should fail
  try (Transaction transaction = newTransaction()) {
    ConnectorSession session = newSession();
    ConnectorMetadata metadata = transaction.getMetadata();
    ConnectorTableHandle tableHandle = getTableHandle(metadata, schemaTableName);
    ConnectorInsertTableHandle insertTableHandle = metadata.beginInsert(session, tableHandle);
    ConnectorPageSink sink =
        pageSinkProvider.createPageSink(transaction.getTransactionHandle(), session, insertTableHandle);
    sink.appendPage(dataAfter.toPage());
    Collection<Slice> fragments = getFutureValue(sink.finish());
    metadata.finishInsert(session, insertTableHandle, fragments);
    transaction.commit();
    fail("expected exception");
  } catch (PrestoException e) {
    // expected
    assertEquals(e.getErrorCode(), HIVE_PARTITION_SCHEMA_MISMATCH.toErrorCode());
  }
}
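The ImmutableMultimap usage worth isolating in this test is the explicitly typed builder call, ImmutableMultimap.<String, HivePrivilegeInfo>builder(), which lets a single user key carry several privilege values, and ImmutableMultimap.of() as the canonical empty instance for the role-privilege argument. A minimal sketch of those two idioms, with plain strings standing in for HivePrivilegeInfo:
import com.google.common.collect.ImmutableMultimap;

public class PrivilegesMultimapExample {
  public static void main(String[] args) {
    // Repeated put() calls with the same key accumulate values rather than overwrite them,
    // which is why one user can be granted several privileges in a single builder chain.
    ImmutableMultimap<String, String> userPrivileges =
        ImmutableMultimap.<String, String>builder()
            .put("alice", "SELECT")
            .put("alice", "INSERT")
            .put("alice", "UPDATE")
            .put("alice", "DELETE")
            .build();
    System.out.println(userPrivileges.get("alice")); // [SELECT, INSERT, UPDATE, DELETE]

    // ImmutableMultimap.of() with no arguments is the shared empty multimap.
    ImmutableMultimap<String, String> rolePrivileges = ImmutableMultimap.of();
    System.out.println(rolePrivileges.isEmpty()); // true
  }
}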
Use of com.google.common.collect.ImmutableMultimap in project buck by facebook.
The class Actions, method getResultingSourceMapping().
public ImmutableMultimap<Integer, Record> getResultingSourceMapping(@NonNull XmlDocument xmlDocument)
    throws ParserConfigurationException, SAXException, IOException {
  SourceFile inMemory = SourceFile.UNKNOWN;
  XmlDocument loadedWithLineNumbers =
      XmlLoader.load(
          xmlDocument.getSelectors(),
          xmlDocument.getSystemPropertyResolver(),
          inMemory,
          xmlDocument.prettyPrint(),
          XmlDocument.Type.MAIN,
          Optional.<String>absent());
  ImmutableMultimap.Builder<Integer, Record> mappingBuilder = ImmutableMultimap.builder();
  for (XmlElement xmlElement : loadedWithLineNumbers.getRootNode().getMergeableElements()) {
    parse(xmlElement, mappingBuilder);
  }
  return mappingBuilder.build();
}
Use of com.google.common.collect.ImmutableMultimap in project intellij by bazelbuild.
The class SourceToTargetMapImpl, method computeSourceToTargetMap().
@SuppressWarnings("unused")
private static ImmutableMultimap<File, TargetKey> computeSourceToTargetMap(
    Project project, BlazeProjectData blazeProjectData) {
  ArtifactLocationDecoder artifactLocationDecoder = blazeProjectData.artifactLocationDecoder;
  ImmutableMultimap.Builder<File, TargetKey> sourceToTargetMap = ImmutableMultimap.builder();
  for (TargetIdeInfo target : blazeProjectData.targetMap.targets()) {
    TargetKey key = target.key;
    for (ArtifactLocation sourceArtifact : target.sources) {
      sourceToTargetMap.put(artifactLocationDecoder.decode(sourceArtifact), key);
    }
  }
  return sourceToTargetMap.build();
}