Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache.
From the class TestTezTask, method setUp:
@SuppressWarnings("unchecked")
@Before
public void setUp() throws Exception {
  utils = mock(DagUtils.class);
  fs = mock(FileSystem.class);
  path = mock(Path.class);
  when(path.getFileSystem(any(Configuration.class))).thenReturn(fs);
  when(utils.getTezDir(any(Path.class))).thenReturn(path);
  // Stub vertex creation to return a real Vertex named after the supplied BaseWork.
  when(utils.createVertex(any(JobConf.class), any(BaseWork.class), any(Path.class),
      any(LocalResource.class), any(List.class), any(FileSystem.class), any(Context.class),
      anyBoolean(), any(TezWork.class), any(VertexType.class))).thenAnswer(new Answer<Vertex>() {

    @Override
    public Vertex answer(InvocationOnMock invocation) throws Throwable {
      Object[] args = invocation.getArguments();
      return Vertex.create(((BaseWork) args[1]).getName(), mock(ProcessorDescriptor.class), 0,
          mock(Resource.class));
    }
  });
  // Stub edge creation to connect the two supplied vertices with a mocked EdgeProperty.
  when(utils.createEdge(any(JobConf.class), any(Vertex.class), any(Vertex.class),
      any(TezEdgeProperty.class), any(VertexType.class))).thenAnswer(new Answer<Edge>() {

    @Override
    public Edge answer(InvocationOnMock invocation) throws Throwable {
      Object[] args = invocation.getArguments();
      return Edge.create((Vertex) args[1], (Vertex) args[2], mock(EdgeProperty.class));
    }
  });
  // Build a small work graph: two map works feeding one reduce work, chained to a second reduce.
  work = new TezWork("", null);
  mws = new MapWork[] { new MapWork(), new MapWork() };
  rws = new ReduceWork[] { new ReduceWork(), new ReduceWork() };
  work.addAll(mws);
  work.addAll(rws);
  int i = 0;
  for (BaseWork w : work.getAllWork()) {
    w.setName("Work " + (++i));
  }
  op = mock(Operator.class);
  LinkedHashMap<String, Operator<? extends OperatorDesc>> map =
      new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
  map.put("foo", op);
  mws[0].setAliasToWork(map);
  mws[1].setAliasToWork(map);
  LinkedHashMap<Path, ArrayList<String>> pathMap = new LinkedHashMap<>();
  ArrayList<String> aliasList = new ArrayList<String>();
  aliasList.add("foo");
  pathMap.put(new Path("foo"), aliasList);
  mws[0].setPathToAliases(pathMap);
  mws[1].setPathToAliases(pathMap);
  rws[0].setReducer(op);
  rws[1].setReducer(op);
  TezEdgeProperty edgeProp = new TezEdgeProperty(EdgeType.SIMPLE_EDGE);
  work.connect(mws[0], rws[0], edgeProp);
  work.connect(mws[1], rws[0], edgeProp);
  work.connect(rws[0], rws[1], edgeProp);
  task = new TezTask(utils);
  task.setWork(work);
  task.setConsole(mock(LogHelper.class));
  QueryPlan mockQueryPlan = mock(QueryPlan.class);
  doReturn(UUID.randomUUID().toString()).when(mockQueryPlan).getQueryId();
  task.setQueryPlan(mockQueryPlan);
  conf = new JobConf();
  appLr = mock(LocalResource.class);
  HiveConf hiveConf = new HiveConf();
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(hiveConf);
  session = mock(TezClient.class);
  sessionState = mock(TezSessionState.class);
  when(sessionState.getSession()).thenReturn(session);
  // First submitDAG attempt fails with SessionNotRunning; subsequent calls succeed.
  when(session.submitDAG(any(DAG.class)))
      .thenThrow(new SessionNotRunning(""))
      .thenReturn(mock(DAGClient.class));
}
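A note on that last stub: Mockito's consecutive stubbing means the thenThrow(...).thenReturn(...) chain makes the first submitDAG call raise SessionNotRunning while every later call succeeds, which is what lets the test exercise TezTask's session re-establishment path. A minimal, self-contained sketch of the pattern (the Greeter interface and class name here are purely illustrative):

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.junit.Test;

public class ConsecutiveStubbingSketch {

  interface Greeter {
    String greet();
  }

  @Test
  public void firstCallFailsThenSucceeds() {
    Greeter g = mock(Greeter.class);
    // Answers are consumed in order; the last one repeats for all further calls.
    when(g.greet())
        .thenThrow(new IllegalStateException("not ready"))
        .thenReturn("hello");
    try {
      g.greet();                       // first call consumes the thenThrow answer
      fail("expected IllegalStateException");
    } catch (IllegalStateException expected) {
      // expected on the first invocation only
    }
    assertEquals("hello", g.greet());  // second and later calls return "hello"
  }
}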
Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache.
From the class TestSymlinkTextInputFormat, method testCombine:
/**
 * Test combining symlink text input files. Two input directories, each
 * containing one file; a single symlink file references both. Normally,
 * without combining, this would produce at least 2 splits.
 */
public void testCombine() throws Exception {
  JobConf newJob = new JobConf(job);
  FileSystem fs = dataDir1.getFileSystem(newJob);
  int symbolLinkedFileSize = 0;
  Path dir1_file1 = new Path(dataDir1, "combinefile1_1");
  writeTextFile(dir1_file1, "dir1_file1_line1\n" + "dir1_file1_line2\n");
  symbolLinkedFileSize += fs.getFileStatus(dir1_file1).getLen();
  Path dir2_file1 = new Path(dataDir2, "combinefile2_1");
  writeTextFile(dir2_file1, "dir2_file1_line1\n" + "dir2_file1_line2\n");
  symbolLinkedFileSize += fs.getFileStatus(dir2_file1).getLen();
  // A symlink file referencing the file from the first dir and the file from
  // the second dir.
  writeSymlinkFile(new Path(symlinkDir, "symlink_file"),
      new Path(dataDir1, "combinefile1_1"), new Path(dataDir2, "combinefile2_1"));
  HiveConf hiveConf = new HiveConf(TestSymlinkTextInputFormat.class);
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_REWORK_MAPREDWORK, true);
  HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  Driver drv = new Driver(hiveConf);
  drv.init();
  String tblName = "text_symlink_text";
  String createSymlinkTableCmd = "create table " + tblName + " (key int) stored as "
      + " inputformat 'org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat' "
      + " outputformat 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'";
  SessionState.start(hiveConf);
  boolean tblCreated = false;
  try {
    int ecode = 0;
    ecode = drv.run(createSymlinkTableCmd).getResponseCode();
    if (ecode != 0) {
      throw new Exception("Create table command: " + createSymlinkTableCmd
          + " failed with exit code= " + ecode);
    }
    tblCreated = true;
    String loadFileCommand = "LOAD DATA LOCAL INPATH '"
        + new Path(symlinkDir, "symlink_file").toString() + "' INTO TABLE " + tblName;
    ecode = drv.run(loadFileCommand).getResponseCode();
    if (ecode != 0) {
      throw new Exception("Load data command: " + loadFileCommand
          + " failed with exit code= " + ecode);
    }
    String cmd = "select key*1 from " + tblName;
    ecode = drv.compile(cmd);
    if (ecode != 0) {
      throw new Exception("Select compile: " + cmd + " failed with exit code= " + ecode);
    }
    // Create the scratch dir the compiled plan expects.
    Context ctx = new Context(newJob);
    Path emptyScratchDir = ctx.getMRTmpPath();
    FileSystem fileSys = emptyScratchDir.getFileSystem(newJob);
    fileSys.mkdirs(emptyScratchDir);
    QueryPlan plan = drv.getPlan();
    MapRedTask selectTask = (MapRedTask) plan.getRootTasks().get(0);
    List<Path> inputPaths = Utilities.getInputPaths(newJob, selectTask.getWork().getMapWork(),
        emptyScratchDir, ctx, false);
    Utilities.setInputPaths(newJob, inputPaths);
    Utilities.setMapRedWork(newJob, selectTask.getWork(), ctx.getMRTmpPath());
    CombineHiveInputFormat combineInputFormat =
        ReflectionUtils.newInstance(CombineHiveInputFormat.class, newJob);
    InputSplit[] retSplits = combineInputFormat.getSplits(newJob, 1);
    // With combining, both symlinked files end up in a single split.
    assertEquals(1, retSplits.length);
  } catch (Exception e) {
    e.printStackTrace();
    fail("Caught exception " + e);
  } finally {
    if (tblCreated) {
      drv.run("drop table text_symlink_text").getResponseCode();
    }
  }
}
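The test leans on a writeSymlinkFile helper; in SymlinkTextInputFormat terms, a "symlink file" is just a plain text file whose lines are the paths of the real data files. A minimal sketch of such a helper (hypothetical reconstruction — the actual helper in this test class may differ; 'job' is assumed to be the test's JobConf field):

private void writeSymlinkFile(Path linkFile, Path... targets) throws IOException {
  FileSystem fs = linkFile.getFileSystem(job);
  // Overwrite any previous file and write one target path per line.
  try (FSDataOutputStream out = fs.create(linkFile, true)) {
    for (Path target : targets) {
      out.writeBytes(target.toString() + "\n");
    }
  }
}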
Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache.
From the class TestDbTxnManager, method testRollback:
@Test
public void testRollback() throws Exception {
  WriteEntity we = addTableOutput(WriteEntity.WriteType.DELETE);
  QueryPlan qp = new MockQueryPlan(this);
  txnMgr.openTxn(ctx, "fred");
  txnMgr.acquireLocks(qp, ctx, "fred");
  List<HiveLock> locks = ctx.getHiveLocks();
  Assert.assertEquals(1, locks.size());
  Assert.assertEquals(1,
      TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
  // Rolling back the transaction must release the lock.
  txnMgr.rollbackTxn();
  locks = txnMgr.getLockManager().getLocks(false, false);
  Assert.assertEquals(0, locks.size());
}
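MockQueryPlan is an inner class of TestDbTxnManager: it hands the transaction manager the read and write entities accumulated by the test's helper methods. A hedged sketch of its likely shape (the real class takes the enclosing test instance and reads its entity sets; explicit sets are used here to keep the sketch self-contained, and the exact QueryPlan signatures may differ):

static class MockQueryPlan extends QueryPlan {
  private final HashSet<ReadEntity> inputs;
  private final HashSet<WriteEntity> outputs;

  MockQueryPlan(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) {
    this.inputs = inputs;
    this.outputs = outputs;
  }

  @Override
  public HashSet<ReadEntity> getInputs() {
    return inputs;   // what acquireLocks requests read locks for
  }

  @Override
  public HashSet<WriteEntity> getOutputs() {
    return outputs;  // what acquireLocks requests write locks for
  }
}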
Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache.
From the class TestDbTxnManager, method testReadWrite:
@Test
public void testReadWrite() throws Exception {
  Table t = newTable(true);
  addPartitionInput(t);
  addPartitionInput(t);
  addPartitionInput(t);
  WriteEntity we = addTableOutput(WriteEntity.WriteType.INSERT);
  QueryPlan qp = new MockQueryPlan(this);
  txnMgr.openTxn(ctx, "fred");
  txnMgr.acquireLocks(qp, ctx, "fred");
  List<HiveLock> locks = ctx.getHiveLocks();
  Assert.assertEquals(1, locks.size());
  // 3 partition read locks + 1 table write lock = 4 components under one lock.
  Assert.assertEquals(4,
      TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
  txnMgr.commitTxn();
  locks = txnMgr.getLockManager().getLocks(false, false);
  Assert.assertEquals(0, locks.size());
}
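The component count follows directly from the fixture helpers: three addPartitionInput calls contribute three read components, and the single addTableOutput contributes one write component. Hedged sketches of what those helpers plausibly look like (hypothetical; readEntities and writeEntities are assumed fields of the test class that MockQueryPlan later exposes):

private ReadEntity addPartitionInput(Table t) throws Exception {
  Map<String, String> partSpec = new HashMap<String, String>();
  partSpec.put("ds", "today");                    // assumed partition spec
  Partition p = new Partition(t, partSpec, new Path("/fake/location"));
  ReadEntity re = new ReadEntity(p);
  readEntities.add(re);                           // served by MockQueryPlan.getInputs()
  return re;
}

private WriteEntity addTableOutput(WriteEntity.WriteType writeType) throws Exception {
  WriteEntity we = new WriteEntity(newTable(false), writeType);
  writeEntities.add(we);                          // served by MockQueryPlan.getOutputs()
  return we;
}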
Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache.
From the class TestDbTxnManager, method testSingleReadMultiPartition:
@Test
public void testSingleReadMultiPartition() throws Exception {
  Table t = newTable(true);
  addPartitionInput(t);
  addPartitionInput(t);
  addPartitionInput(t);
  QueryPlan qp = new MockQueryPlan(this);
  txnMgr.acquireLocks(qp, ctx, "fred");
  List<HiveLock> locks = ctx.getHiveLocks();
  Assert.assertEquals(1, locks.size());
  // The three partition reads collapse into one lock with three components.
  Assert.assertEquals(3,
      TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
  txnMgr.getLockManager().unlock(locks.get(0));
  locks = txnMgr.getLockManager().getLocks(false, false);
  Assert.assertEquals(0, locks.size());
}
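Both partitioned tests above start from newTable(true). A minimal sketch of such a fixture helper (hypothetical — the real one may set more table metadata; tableNum is an assumed counter field used to keep names unique):

private Table newTable(boolean isPartitioned) {
  Table t = new Table("default", "table" + (tableNum++));
  if (isPartitioned) {
    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
    partCols.add(new FieldSchema("ds", "string", "partition column"));
    t.setPartCols(partCols);
  }
  return t;
}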