Use of com.thinkbiganalytics.spark.dataprofiler.StatisticsModel in project kylo by Teradata.
The class ProfilerTest, method setUp:
@Before
@SuppressWarnings("unchecked")
public void setUp() {
    if (columnStatsMap == null) {
        // Define a 15-column schema covering integer, string, double, date, boolean,
        // timestamp, long, float, short, byte, and decimal types.
        StructField[] schemaFields = new StructField[15];
        schemaFields[0] = DataTypes.createStructField("id", DataTypes.IntegerType, true);
        schemaFields[1] = DataTypes.createStructField("firstname", DataTypes.StringType, true);
        schemaFields[2] = DataTypes.createStructField("lastname", DataTypes.StringType, true);
        schemaFields[3] = DataTypes.createStructField("age", DataTypes.IntegerType, true);
        schemaFields[4] = DataTypes.createStructField("description", DataTypes.StringType, true);
        schemaFields[5] = DataTypes.createStructField("height", DataTypes.DoubleType, true);
        schemaFields[6] = DataTypes.createStructField("joindate", DataTypes.DateType, true);
        schemaFields[7] = DataTypes.createStructField("lifemember", DataTypes.BooleanType, true);
        schemaFields[8] = DataTypes.createStructField("lastlogin", DataTypes.TimestampType, true);
        schemaFields[9] = DataTypes.createStructField("phash", DataTypes.LongType, true);
        schemaFields[10] = DataTypes.createStructField("weight", DataTypes.FloatType, true);
        schemaFields[11] = DataTypes.createStructField("credits", DataTypes.ShortType, true);
        schemaFields[12] = DataTypes.createStructField("ccode", DataTypes.ByteType, true);
        schemaFields[13] = DataTypes.createStructField("score", DataTypes.createDecimalType(7, 5), true);
        schemaFields[14] = DataTypes.createStructField("favoritepet", DataTypes.StringType, true);
        StructType schema = DataTypes.createStructType(schemaFields);

        // Ten rows of test data, deliberately mixing nulls, empty strings, and
        // duplicate values so the profiler has something to summarize.
        List<Row> rows = new ArrayList<>();
        rows.add(RowFactory.create(1, "Jon", "Wright", 14, "Jon::Wright", 5.85d, Date.valueOf("2010-05-04"), Boolean.TRUE, Timestamp.valueOf("2008-05-06 23:10:10"), 1456890911L, 40.2f, (short) 100, (byte) 99, new BigDecimal(String.valueOf(1.567)), "Cat"));
        rows.add(RowFactory.create(2, "Jon", "Hudson", null, "Jon::Hudson", 5.85d, Date.valueOf("1990-10-25"), null, Timestamp.valueOf("2011-01-08 11:25:45"), 7638962135L, 110.5f, (short) 100, (byte) 99, new BigDecimal(String.valueOf(8.223)), "alligator"));
        rows.add(RowFactory.create(3, "Rachael", "Hu", 40, "Rachael::Hu", 6.22d, Date.valueOf("1990-10-25"), Boolean.TRUE, Timestamp.valueOf("2011-01-08 11:25:45"), 2988626110L, 160.7f, (short) 1400, (byte) 99, new BigDecimal(String.valueOf(1.567)), "Alpaca"));
        rows.add(RowFactory.create(4, EMPTY_STRING, EMPTY_STRING, 40, null, null, Date.valueOf("1956-11-12"), Boolean.TRUE, Timestamp.valueOf("2008-05-06 23:10:10"), 2988626110L, null, null, (byte) 99, null, "Cat"));
        rows.add(RowFactory.create(5, "Rachael", EMPTY_STRING, 22, "Rachael::", 5.85d, Date.valueOf("2005-12-24"), Boolean.FALSE, Timestamp.valueOf("2008-05-06 23:10:10"), 8260467621L, 160.7f, (short) 100, null, new BigDecimal(String.valueOf(4.343)), "Zebra"));
        rows.add(RowFactory.create(6, "Elizabeth", "Taylor", 40, "Elizabeth::Taylor", 5.85d, Date.valueOf("2011-08-08"), null, Timestamp.valueOf("2016-01-14 14:20:20"), 8732866249L, null, (short) 1400, null, new BigDecimal(String.valueOf(4.343)), "ZEBRA"));
        rows.add(RowFactory.create(7, "Jon", "Taylor", 18, "Jon::Taylor", null, Date.valueOf("2011-08-08"), Boolean.TRUE, Timestamp.valueOf("2011-01-08 11:25:45"), 2988626110L, 110.5f, (short) 500, (byte) 40, new BigDecimal(String.valueOf(4.343)), null));
        rows.add(RowFactory.create(8, "Rachael", EMPTY_STRING, 22, "Rachael::", 4.37d, Date.valueOf("2011-08-08"), Boolean.FALSE, Timestamp.valueOf("2008-05-06 23:10:10"), 8782348100L, null, null, null, null, "albatross"));
        rows.add(RowFactory.create(9, EMPTY_STRING, "Edmundson Jr", 11, "::Edmundson Jr", 4.88d, Date.valueOf("2007-06-07"), Boolean.FALSE, Timestamp.valueOf("2007-03-16 08:24:37"), null, 155.3f, (short) 0, (byte) 99, new BigDecimal(String.valueOf(1.567)), EMPTY_STRING));
        rows.add(RowFactory.create(10, "Jon", EMPTY_STRING, 65, "Jon::", null, Date.valueOf("1975-04-04"), Boolean.TRUE, Timestamp.valueOf("2007-03-16 08:24:31"), null, 180.6f, (short) 5000, (byte) 2, new BigDecimal(String.valueOf(4.343)), "Cat"));

        // Parallelize the rows into a DataFrame and wrap it in Kylo's DataSet abstraction.
        final JavaSparkContext javaSparkContext = JavaSparkContext.fromSparkContext(sqlContext.sparkContext());
        JavaRDD<Row> dataRDD = javaSparkContext.parallelize(rows);
        DataSet dataDF = scs.toDataSet(sqlContext.createDataFrame(dataRDD, schema));

        /* Enable to debug contents of test data */
        /*
        for (Row r : dataRDD.collect()) {
            System.out.println(r.toString());
        }
        */

        // Profile the data set; fall back to an empty map when no statistics were produced.
        StatisticsModel statsModel = profiler.profile(dataDF, new ProfilerConfiguration());
        columnStatsMap = (statsModel != null) ? (Map) statsModel.getColumnStatisticsMap() : (Map<Integer, StandardColumnStatistics>) Collections.EMPTY_MAP;
    }
}
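The statistics computed in setUp can then be inspected per column. A minimal follow-up sketch, assuming it sits in the same test class, that columnStatsMap is keyed by zero-based column index (as the Map<Integer, StandardColumnStatistics> cast suggests), and that StandardColumnStatistics exposes the getStatistics() accessor seen on ColumnStatistics in the next snippet; the test name is illustrative:

@Test
public void firstNameColumnIsProfiled() {
    // Column 1 is "firstname" in the schema built in setUp().
    StandardColumnStatistics firstNameStats = columnStatsMap.get(1);
    Assert.assertNotNull(firstNameStats);

    // Each OutputRow carries one profiled metric for the column.
    for (OutputRow row : firstNameStats.getStatistics()) {
        System.out.println(row);
    }
}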
Use of com.thinkbiganalytics.spark.dataprofiler.StatisticsModel in project kylo by Teradata.
The class ProfileStage, method apply:
@Nonnull
@Override
public TransformResult apply(@Nullable final TransformResult result) {
    Preconditions.checkNotNull(result);

    // Profile data set
    final StatisticsModel dataStats = profiler.profile(result.getDataSet(), new ProfilerConfiguration());

    // Add stats to result
    if (dataStats != null) {
        final List<OutputRow> profile = (result.getProfile() != null) ? new ArrayList<>(result.getProfile()) : new ArrayList<OutputRow>(dataStats.getColumnStatisticsMap().size());
        for (final ColumnStatistics columnStats : dataStats.getColumnStatisticsMap().values()) {
            profile.addAll(columnStats.getStatistics());
        }
        result.setProfile(profile);
    }
    return result;
}
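For context, a hedged sketch of how this stage might be invoked. The single-argument ProfileStage constructor and the transformResult variable are assumptions, not taken from the excerpt:

// Run the profile stage over an existing transform result.
final ProfileStage profileStage = new ProfileStage(profiler);  // constructor signature assumed
final TransformResult profiled = profileStage.apply(transformResult);

// When the profiler returned statistics, apply() has merged them into the profile list.
if (profiled.getProfile() != null) {
    for (final OutputRow row : profiled.getProfile()) {
        System.out.println(row);
    }
}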
Use of com.thinkbiganalytics.spark.dataprofiler.StatisticsModel in project kylo by Teradata.
The class Profiler, method run:
public void run(String[] args) {
    /* Variables */
    DataSet resultDF;
    String queryString;

    /* Check command line arguments and get query to run. */
    if ((queryString = checkCommandLineArgs(args)) == null) {
        return;
    }

    /* Run query and get result */
    log.info("[PROFILER-INFO] Analyzing profile statistics for: [{}]", queryString);
    resultDF = sparkContextService.sql(sqlContext, queryString);

    /* Get profile statistics and write to table */
    final StatisticsModel statisticsModel = profiler.profile(resultDF, profilerConfiguration);
    if (statisticsModel != null) {
        OutputWriter.writeModel(statisticsModel, profilerConfiguration, sqlContext, sparkContextService);
    } else {
        log.info("[PROFILER-INFO] No data to process. Hence, no profile statistics generated.");
    }

    /* Wrap up */
    log.info("[PROFILER-INFO] Profiling finished.");
}
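A minimal driver sketch, assuming checkCommandLineArgs extracts the SQL query from the argument array (its parsing rules are not shown here); obtainConfiguredProfiler and the table name are hypothetical stand-ins, since in Kylo the Profiler is assembled with a Spark context, SQLContext, and profiler configuration:

// Illustrative only: obtainConfiguredProfiler() stands in for however the
// surrounding application wires up a fully configured Profiler instance.
Profiler profilerApp = obtainConfiguredProfiler();  // hypothetical factory
profilerApp.run(new String[] { "SELECT id, age, height FROM profiler_test_table" });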