Methods in com.splout.db.hadoop that return TableBuilder

TableBuilder | TableBuilder.addCascadingTable(org.apache.hadoop.fs.Path path, java.lang.String[] columnNames)
TableBuilder | TableBuilder.addCascadingTable(org.apache.hadoop.fs.Path inputPath, java.lang.String[] columnNames, org.apache.hadoop.conf.Configuration conf)
TableBuilder | TableBuilder.addCSVTextFile(org.apache.hadoop.fs.Path path)
TableBuilder | TableBuilder.addCSVTextFile(org.apache.hadoop.fs.Path path, char separator, char quoteCharacter, char escapeCharacter, boolean hasHeader, boolean strictQuotes, java.lang.String nullString)
TableBuilder | TableBuilder.addCSVTextFile(org.apache.hadoop.fs.Path path, char separator, char quoteCharacter, char escapeCharacter, boolean hasHeader, boolean strictQuotes, java.lang.String nullString, com.datasalt.pangool.io.Schema fileSchema, RecordProcessor recordProcessor)
TableBuilder | TableBuilder.addCSVTextFile(org.apache.hadoop.fs.Path path, com.datasalt.pangool.io.Schema fileSchema, RecordProcessor recordProcessor)
TableBuilder | TableBuilder.addCSVTextFile(java.lang.String path)
TableBuilder | TableBuilder.addCSVTextFile(java.lang.String path, char separator, char quoteCharacter, char escapeCharacter, boolean hasHeader, boolean strictQuotes, java.lang.String nullString)
TableBuilder | TableBuilder.addCSVTextFile(java.lang.String path, char separator, char quoteCharacter, char escapeCharacter, boolean hasHeader, boolean strictQuotes, java.lang.String nullString, com.datasalt.pangool.io.Schema fileSchema, RecordProcessor recordProcessor)
TableBuilder | TableBuilder.addCSVTextFile(java.lang.String path, com.datasalt.pangool.io.Schema fileSchema, RecordProcessor recordProcessor)
TableBuilder | TableBuilder.addCustomInputFormatFile(org.apache.hadoop.fs.Path path, org.apache.hadoop.mapreduce.InputFormat<com.datasalt.pangool.io.ITuple,org.apache.hadoop.io.NullWritable> inputFormat)
TableBuilder | TableBuilder.addCustomInputFormatFile(org.apache.hadoop.fs.Path path, org.apache.hadoop.mapreduce.InputFormat<com.datasalt.pangool.io.ITuple,org.apache.hadoop.io.NullWritable> inputFormat, java.util.Map<java.lang.String,java.lang.String> specificContext, RecordProcessor recordProcessor)
TableBuilder | TableBuilder.addCustomInputFormatFile(org.apache.hadoop.fs.Path path, org.apache.hadoop.mapreduce.InputFormat<com.datasalt.pangool.io.ITuple,org.apache.hadoop.io.NullWritable> inputFormat, RecordProcessor recordProcessor)
TableBuilder | TableBuilder.addFile(TableInput tableFile)
TableBuilder | TableBuilder.addFixedWidthTextFile(org.apache.hadoop.fs.Path path, com.datasalt.pangool.io.Schema schema, int[] fields, boolean hasHeader, java.lang.String nullString, RecordProcessor recordProcessor)
TableBuilder | TableBuilder.addHiveTable(java.lang.String dbName, java.lang.String tableName)
TableBuilder | TableBuilder.addHiveTable(java.lang.String dbName, java.lang.String tableName, org.apache.hadoop.conf.Configuration conf)
TableBuilder | TableBuilder.addTupleFile(org.apache.hadoop.fs.Path path)
TableBuilder | TableBuilder.addTupleFile(org.apache.hadoop.fs.Path path, RecordProcessor recordProcessor)
TableBuilder | TableBuilder.createIndex(java.lang.String... indexFields)
TableBuilder | TableBuilder.finalSQL(java.lang.String... finalSQLStatements)
TableBuilder | TableBuilder.initialSQL(java.lang.String... initialSQLStatements)
TableBuilder | TableBuilder.insertionSortOrder(com.datasalt.pangool.tuplemr.OrderBy orderBy)
TableBuilder | TableBuilder.partitionBy(java.lang.String... partitionByFields)
TableBuilder | TableBuilder.partitionByJavaScript(java.lang.String javascript)
TableBuilder | TableBuilder.postInsertsSQL(java.lang.String... postInsertsSQLStatements)
TableBuilder | TableBuilder.preInsertsSQL(java.lang.String... preInsertsSQLStatements)
TableBuilder | TableBuilder.replicateToAll()
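
Since every method in the table returns the TableBuilder itself, a table definition can be written as a fluent chain. The following is a minimal sketch of that style, assuming a TableBuilder constructed directly from a Pangool Schema; the constructor form and the schema-building calls are assumptions for illustration, while the chained methods are taken from the listing above.

    import org.apache.hadoop.fs.Path;
    import com.datasalt.pangool.io.Fields;
    import com.datasalt.pangool.io.Schema;
    import com.splout.db.hadoop.TableBuilder;

    public class TableBuilderExample {

      public static void main(String[] args) throws Exception {
        // Hypothetical table schema; the Fields.parse() idiom and this Schema
        // constructor come from Pangool and are assumptions for illustration.
        Schema schema = new Schema("customer", Fields.parse("id:long,name:string,country:string"));

        // Assumed constructor: a TableBuilder built around the table schema.
        TableBuilder builder = new TableBuilder(schema);

        // Every method chained below appears in the listing above and returns
        // TableBuilder, so the calls compose into a single fluent statement.
        builder.addCSVTextFile(new Path("/input/customers.csv"))
               .partitionBy("country")
               .createIndex("country");
      }
    }

The chain above partitions the table by the country field; replicateToAll(), also listed above, is the alternative for small lookup tables that should be copied whole to every partition instead of being partitioned.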