create table if not exists textfile_table(
site string,
url string,
pv bigint,
label string)
row format delimited fields terminated by '\t' stored as textfile;
Insert data:
set hive.exec.compress.output=true;
set mapred.output.compress=true;
set mapred.output.compression.codec=org.apache.hadoop.io.compress.GzipCodec;
set io.compression.codecs=org.apache.hadoop.io.compress.GzipCodec;
insert overwrite table textfile_table select * from textfile_table;
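Note that the insert above reads from textfile_table itself, so it only rewrites rows that are already there in compressed form; the table has to be populated first. A minimal sketch, assuming a local tab-separated file at /tmp/pv_data.txt (hypothetical path) whose columns match (site, url, pv, label):

-- Hypothetical initial load; /tmp/pv_data.txt is an assumed local
-- tab-separated file matching the table's column order.
load data local inpath '/tmp/pv_data.txt'
overwrite into table textfile_table;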
create table if not exists seqfile_table(
site string,
url string,
pv bigint,
label string)
row format delimited fields terminated by '\t' stored as sequencefile;
Insert data:
set hive.exec.compress.output=true;
set mapred.output.compress=true;
set mapred.output.compression.codec=org.apache.hadoop.io.compress.GzipCodec;
set io.compression.codecs=org.apache.hadoop.io.compress.GzipCodec;
SET mapred.output.compression.type=BLOCK;
insert overwrite table seqfile_table select * from textfile_table;
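A quick sanity check that the rewrite into the SequenceFile table kept all rows is to compare counts between the source and target tables; a minimal sketch:

-- Row counts should match between the source and the SequenceFile copy.
select count(*) from textfile_table;
select count(*) from seqfile_table;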
create table if not exists rcfile_table(
site string,
url string,
pv bigint,
label string)
row format delimited fields terminated by '\t' stored as rcfile;
Insert data:
set hive.exec.compress.output=true;
set mapred.output.compress=true;
set mapred.output.compression.codec=org.apache.hadoop.io.compress.GzipCodec;
set io.compression.codecs=org.apache.hadoop.io.compress.GzipCodec;
insert overwrite table rcfile_table select * from textfile_table;
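With all three tables written out under the same GzipCodec, their on-disk sizes can be compared directly from the Hive CLI; a minimal sketch, assuming the tables live under the default warehouse directory /user/hive/warehouse (adjust the paths if hive.metastore.warehouse.dir or the database location differs):

-- Compare the compressed size of each storage format
-- (assumed default warehouse paths).
dfs -du /user/hive/warehouse/textfile_table;
dfs -du /user/hive/warehouse/seqfile_table;
dfs -du /user/hive/warehouse/rcfile_table;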