Use of org.apache.hadoop.hive.conf.HiveConf in project hive by apache.
From the class TestLlapOutputFormat, method testValues.
@Test
public void testValues() throws Exception {
  JobConf job = new JobConf();
  for (int k = 0; k < 5; ++k) {
    String id = "foobar" + k;
    job.set(LlapOutputFormat.LLAP_OF_ID_KEY, id);
    LlapOutputFormat format = new LlapOutputFormat();
    HiveConf conf = new HiveConf();
    Socket socket = new Socket("localhost", service.getPort());
    LOG.debug("Socket connected");
    OutputStream socketStream = socket.getOutputStream();
    LlapOutputSocketInitMessage.newBuilder().setFragmentId(id).build().writeDelimitedTo(socketStream);
    socketStream.flush();
    Thread.sleep(3000);
    LOG.debug("Data written");
    RecordWriter<NullWritable, Text> writer = format.getRecordWriter(null, job, null, null);
    Text text = new Text();
    LOG.debug("Have record writer");
    for (int i = 0; i < 10; ++i) {
      text.set("" + i);
      writer.write(NullWritable.get(), text);
    }
    writer.close(null);
    InputStream in = socket.getInputStream();
    LlapBaseRecordReader reader = new LlapBaseRecordReader(in, null, Text.class, job, null, null);
    LOG.debug("Have record reader");
    // Send done event, which LlapRecordReader is expecting upon end of input
    reader.handleEvent(ReaderEvent.doneEvent());
    int count = 0;
    while (reader.next(NullWritable.get(), text)) {
      LOG.debug(text.toString());
      count++;
    }
    reader.close();
    Assert.assertEquals(10, count);
  }
}
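All of these snippets revolve around the same configuration object. As a reference point, here is a minimal sketch of the typed get/set pattern HiveConf layers on top of Hadoop's Configuration, using only ConfVars entries that already appear on this page:

// Minimal sketch of the HiveConf typed get/set pattern used throughout these tests.
HiveConf conf = new HiveConf();
// Typed setters keyed by the HiveConf.ConfVars enum:
conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
    "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
// Matching typed getters:
boolean concurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
String authMgr = conf.getVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);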
Use of org.apache.hadoop.hive.conf.HiveConf in project hive by apache.
From the class TestTezTask, method setUp.
@SuppressWarnings("unchecked")
@Before
public void setUp() throws Exception {
  utils = mock(DagUtils.class);
  fs = mock(FileSystem.class);
  path = mock(Path.class);
  when(path.getFileSystem(any(Configuration.class))).thenReturn(fs);
  when(utils.getTezDir(any(Path.class))).thenReturn(path);
  when(utils.createVertex(any(JobConf.class), any(BaseWork.class), any(Path.class),
      any(LocalResource.class), any(List.class), any(FileSystem.class), any(Context.class),
      anyBoolean(), any(TezWork.class), any(VertexType.class))).thenAnswer(new Answer<Vertex>() {

    @Override
    public Vertex answer(InvocationOnMock invocation) throws Throwable {
      Object[] args = invocation.getArguments();
      return Vertex.create(((BaseWork) args[1]).getName(), mock(ProcessorDescriptor.class), 0,
          mock(Resource.class));
    }
  });
  when(utils.createEdge(any(JobConf.class), any(Vertex.class), any(Vertex.class),
      any(TezEdgeProperty.class), any(VertexType.class))).thenAnswer(new Answer<Edge>() {

    @Override
    public Edge answer(InvocationOnMock invocation) throws Throwable {
      Object[] args = invocation.getArguments();
      return Edge.create((Vertex) args[1], (Vertex) args[2], mock(EdgeProperty.class));
    }
  });
  work = new TezWork("", null);
  mws = new MapWork[] { new MapWork(), new MapWork() };
  rws = new ReduceWork[] { new ReduceWork(), new ReduceWork() };
  work.addAll(mws);
  work.addAll(rws);
  int i = 0;
  for (BaseWork w : work.getAllWork()) {
    w.setName("Work " + (++i));
  }
  op = mock(Operator.class);
  LinkedHashMap<String, Operator<? extends OperatorDesc>> map =
      new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
  map.put("foo", op);
  mws[0].setAliasToWork(map);
  mws[1].setAliasToWork(map);
  LinkedHashMap<Path, ArrayList<String>> pathMap = new LinkedHashMap<>();
  ArrayList<String> aliasList = new ArrayList<String>();
  aliasList.add("foo");
  pathMap.put(new Path("foo"), aliasList);
  mws[0].setPathToAliases(pathMap);
  mws[1].setPathToAliases(pathMap);
  rws[0].setReducer(op);
  rws[1].setReducer(op);
  TezEdgeProperty edgeProp = new TezEdgeProperty(EdgeType.SIMPLE_EDGE);
  work.connect(mws[0], rws[0], edgeProp);
  work.connect(mws[1], rws[0], edgeProp);
  work.connect(rws[0], rws[1], edgeProp);
  task = new TezTask(utils);
  task.setWork(work);
  task.setConsole(mock(LogHelper.class));
  QueryPlan mockQueryPlan = mock(QueryPlan.class);
  doReturn(UUID.randomUUID().toString()).when(mockQueryPlan).getQueryId();
  task.setQueryPlan(mockQueryPlan);
  conf = new JobConf();
  appLr = mock(LocalResource.class);
  HiveConf hiveConf = new HiveConf();
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(hiveConf);
  session = mock(TezClient.class);
  sessionState = mock(TezSessionState.class);
  when(sessionState.getSession()).thenReturn(session);
  when(session.submitDAG(any(DAG.class)))
      .thenThrow(new SessionNotRunning(""))
      .thenReturn(mock(DAGClient.class));
}
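One detail worth calling out: the submitDAG stub chains thenThrow(...).thenReturn(...), so Mockito throws SessionNotRunning on the first call and returns a DAGClient on the retry, which is exactly the session-reconnect path the test exercises. A minimal standalone sketch of that consecutive-call stubbing (FlakyService is a hypothetical interface for illustration only; assumes static imports from org.mockito.Mockito):

// Hypothetical interface, for illustration only.
interface FlakyService {
  String call() throws Exception;
}

FlakyService svc = mock(FlakyService.class);
// Stubbed behaviors apply in order across consecutive calls:
when(svc.call()).thenThrow(new RuntimeException("first attempt fails"))
    .thenReturn("ok");
// The first invocation of svc.call() throws; the second returns "ok".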
Use of org.apache.hadoop.hive.conf.HiveConf in project hive by apache.
From the class TestSymlinkTextInputFormat, method testCombine.
/**
 * Test combining symlink text input. Two input directories each contain one
 * file, and a single symlink file references both files. Without combining,
 * this would produce at least two splits.
 */
public void testCombine() throws Exception {
  JobConf newJob = new JobConf(job);
  FileSystem fs = dataDir1.getFileSystem(newJob);
  int symbolLinkedFileSize = 0;
  Path dir1_file1 = new Path(dataDir1, "combinefile1_1");
  writeTextFile(dir1_file1, "dir1_file1_line1\n" + "dir1_file1_line2\n");
  symbolLinkedFileSize += fs.getFileStatus(dir1_file1).getLen();
  Path dir2_file1 = new Path(dataDir2, "combinefile2_1");
  writeTextFile(dir2_file1, "dir2_file1_line1\n" + "dir2_file1_line2\n");
  symbolLinkedFileSize += fs.getFileStatus(dir2_file1).getLen();
  // A symlink file that references the first file from the first dir and the
  // second file from the second dir.
  writeSymlinkFile(new Path(symlinkDir, "symlink_file"),
      new Path(dataDir1, "combinefile1_1"), new Path(dataDir2, "combinefile2_1"));
  HiveConf hiveConf = new HiveConf(TestSymlinkTextInputFormat.class);
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_REWORK_MAPREDWORK, true);
  HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  Driver drv = new Driver(hiveConf);
  drv.init();
  String tblName = "text_symlink_text";
  String createSymlinkTableCmd = "create table " + tblName + " (key int) stored as "
      + " inputformat 'org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat' "
      + " outputformat 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'";
  SessionState.start(hiveConf);
  boolean tblCreated = false;
  try {
    int ecode = 0;
    ecode = drv.run(createSymlinkTableCmd).getResponseCode();
    if (ecode != 0) {
      throw new Exception("Create table command: " + createSymlinkTableCmd
          + " failed with exit code= " + ecode);
    }
    tblCreated = true;
    String loadFileCommand = "LOAD DATA LOCAL INPATH '"
        + new Path(symlinkDir, "symlink_file").toString() + "' INTO TABLE " + tblName;
    ecode = drv.run(loadFileCommand).getResponseCode();
    if (ecode != 0) {
      throw new Exception("Load data command: " + loadFileCommand
          + " failed with exit code= " + ecode);
    }
    String cmd = "select key*1 from " + tblName;
    ecode = drv.compile(cmd);
    if (ecode != 0) {
      throw new Exception("Select compile: " + cmd + " failed with exit code= " + ecode);
    }
    // Create scratch dir.
    Context ctx = new Context(newJob);
    Path emptyScratchDir = ctx.getMRTmpPath();
    FileSystem fileSys = emptyScratchDir.getFileSystem(newJob);
    fileSys.mkdirs(emptyScratchDir);
    QueryPlan plan = drv.getPlan();
    MapRedTask selectTask = (MapRedTask) plan.getRootTasks().get(0);
    List<Path> inputPaths = Utilities.getInputPaths(newJob, selectTask.getWork().getMapWork(),
        emptyScratchDir, ctx, false);
    Utilities.setInputPaths(newJob, inputPaths);
    Utilities.setMapRedWork(newJob, selectTask.getWork(), ctx.getMRTmpPath());
    CombineHiveInputFormat combineInputFormat =
        ReflectionUtils.newInstance(CombineHiveInputFormat.class, newJob);
    InputSplit[] retSplits = combineInputFormat.getSplits(newJob, 1);
    assertEquals(1, retSplits.length);
  } catch (Exception e) {
    e.printStackTrace();
    fail("Caught exception " + e);
  } finally {
    if (tblCreated) {
      drv.run("drop table text_symlink_text").getResponseCode();
    }
  }
}
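The writeSymlinkFile helper is not shown above. For SymlinkTextInputFormat, a "symlink file" is just a plain text file listing one target path per line; a plausible sketch of such a helper follows (the signature here is an assumption, and the test's own helper may differ):

// Hypothetical helper: write one target path per line, which is the
// on-disk format SymlinkTextInputFormat expects.
static void writeSymlinkFile(FileSystem fs, Path symlinkPath, Path... targets) throws IOException {
  try (FSDataOutputStream out = fs.create(symlinkPath)) {
    for (Path target : targets) {
      out.writeBytes(target.toString() + "\n");
    }
  }
}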
Use of org.apache.hadoop.hive.conf.HiveConf in project hive by apache.
From the class TestHooks, method testQueryRedactor.
@Test
public void testQueryRedactor() throws Exception {
  HiveConf conf = new HiveConf(TestHooks.class);
  HiveConf.setVar(conf, HiveConf.ConfVars.QUERYREDACTORHOOKS, SimpleQueryRedactor.class.getName());
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  Driver driver = createDriver(conf);
  int ret = driver.compile("select 'XXX' from t1");
  assertEquals("Checking command success", 0, ret);
  assertEquals("select 'AAA' from t1", conf.getQueryString());
}
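SimpleQueryRedactor is defined elsewhere in TestHooks; from the assertions it evidently rewrites 'XXX' to 'AAA' before the query string is stored back into the conf. A plausible sketch, assuming Hive's org.apache.hadoop.hive.ql.hooks.Redactor base class (the body is an illustration, not the test's actual code):

// Plausible sketch of the hook wired in via QUERYREDACTORHOOKS.
public static class SimpleQueryRedactor extends Redactor {
  @Override
  public String redactQuery(String query) {
    // Mask the sensitive literal before the query is logged or stored.
    return query.replaceAll("XXX", "AAA");
  }
}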
Use of org.apache.hadoop.hive.conf.HiveConf in project hive by apache.
From the class TestQueryHooks, method setUpBeforeClass.
@BeforeClass
public static void setUpBeforeClass() {
  conf = new HiveConf(TestQueryHooks.class);
  conf.setVar(HiveConf.ConfVars.HIVE_QUERY_LIFETIME_HOOKS, TestLifeTimeHook.class.getName());
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
}
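TestLifeTimeHook itself is not shown. Hooks registered through HIVE_QUERY_LIFETIME_HOOKS implement org.apache.hadoop.hive.ql.hooks.QueryLifeTimeHook and receive callbacks around compilation and execution. A minimal sketch of such a hook is below; the method signatures are from memory and may differ slightly across Hive versions, and the counter is illustrative, not the actual test hook:

// Illustrative sketch; assumes imports of QueryLifeTimeHook,
// QueryLifeTimeHookContext and java.util.concurrent.atomic.AtomicInteger.
public static class TestLifeTimeHook implements QueryLifeTimeHook {
  static final AtomicInteger COMPILE_CALLS = new AtomicInteger();

  @Override
  public void beforeCompile(QueryLifeTimeHookContext ctx) {
    // Record that compilation started; a real hook might collect timings.
    COMPILE_CALLS.incrementAndGet();
  }

  @Override
  public void afterCompile(QueryLifeTimeHookContext ctx, boolean hasError) {
  }

  @Override
  public void beforeExecution(QueryLifeTimeHookContext ctx) {
  }

  @Override
  public void afterExecution(QueryLifeTimeHookContext ctx, boolean hasError) {
  }
}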