- 阅读权限
- 255
- 威望
- 0 级
- 论坛币
- 2491 个
- 通用积分
- 8.4272
- 学术水平
- 10 点
- 热心指数
- 16 点
- 信用等级
- 8 点
- 经验
- 2008 点
- 帖子
- 346
- 精华
- 0
- 在线时间
- 613 小时
- 注册时间
- 2013-4-26
- 最后登录
- 2023-3-18
已卖:139份资源
讲师
还不是VIP/贵宾
- 威望
- 0 级
- 论坛币
 - 2491 个
- 通用积分
- 8.4272
- 学术水平
- 10 点
- 热心指数
- 16 点
- 信用等级
- 8 点
- 经验
- 2008 点
- 帖子
- 346
- 精华
- 0
- 在线时间
- 613 小时
- 注册时间
- 2013-4-26
- 最后登录
- 2023-3-18
 | 无聊 2018-7-9 21:49:48 |
|---|
签到天数: 444 天 连续签到: 1 天 [LV.9]以坛为家II
|
经管之家送您一份
应届毕业生专属福利!
求职就业群
感谢您参与论坛问题回答
经管之家送您两个论坛币!
+2 论坛币
- # start
- sh $HIVE_HOME/bin/hive --service hwi
- hadoop fs -text /user/admin/daiqf/createspu_fp/input/cateinfo | head
- # 005 means the tab
- CREATE TABLE IF NOT EXISTS table_name ROW FORMAT DELIMITED FIELDS TERMINATED BY '\005' STORED AS textfile
- hadoop fs -ls
-- ============ Partitioned tables ============

-- 1. Build a table partitioned by load date (dt).
--    Partition columns are virtual: they live in the directory path,
--    not in the data files, so dt is declared outside the column list.
CREATE TABLE c02_clickstat_fatdtl
(
    yyyymmdd          STRING,
    id                INT,
    ip                STRING,
    country           STRING,
    cookie_id         STRING,
    page_id           STRING,
    clickstat_url_id  INT,
    query_string      STRING,
    refer             STRING
)
PARTITIONED BY (dt STRING)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\005';

-- 2. Load data into one partition. OVERWRITE replaces any existing
--    contents of that partition; INPATH (no LOCAL) moves files already
--    on HDFS instead of copying from the local filesystem.
LOAD DATA INPATH '/user/admin/SqlldrDat/CnClickstat/20101101/19/clickstat_gp_fatdt0/0'
OVERWRITE INTO TABLE c02_clickstat_fatdtl PARTITION (dt='20101101');

-- 3. Query a partition. Half-open range predicates on the partition
--    column let Hive prune to only the matching directories.
SELECT COUNT(*)
FROM c02_clickstat_fatdtl a
WHERE a.dt >= '20101101' AND a.dt < '20101102';

SELECT * FROM ods_dpi_mob_data_log a WHERE a.dt >= '2014051814' AND a.dt < '2014051815';

-- 4. Add an (empty) partition explicitly, then inspect.
ALTER TABLE ods_dpi_mob_data_log ADD PARTITION (dt='2014051814');
SHOW PARTITIONS ods_dpi_mob_data_log;

-- Rename a table (metadata-only operation).
ALTER TABLE table_name RENAME TO new_table_name;
-- Transcript: ADD COLUMNS vs REPLACE COLUMNS.
hive> desc xi;
OK
id            int
cont          string
dw_ins_date   string

-- Clone the schema (no data is copied).
hive> create table xibak like xi;
OK

-- ADD COLUMNS appends to the existing column list.
hive> alter table xibak add columns (ins_date1 string);
OK
hive> desc xibak;
OK
id            int
cont          string
dw_ins_date   string
ins_date1     string

-- REPLACE COLUMNS swaps in a brand-new column list: every column not
-- restated here disappears from the schema, as the following DESC shows.
-- (Original note had the typo "repalce", which Hive rejects.)
hive> alter table xibak replace columns (ins_date2 string);
OK
hive> desc xibak;
OK
ins_date2     string
-- Catalog inspection. SHOW TABLES accepts a Java regex, not SQL LIKE:
-- '.' matches any char, '.*' any sequence.
SHOW TABLES;
SHOW TABLES 'page.*';   -- tables starting with "page"
SHOW TABLES '.*view';   -- tables ending with "view"
SHOW PARTITIONS page_view;
DESCRIBE invites;

-- Querying with a partition filter, and capping output with LIMIT.
SELECT a.foo FROM invites a WHERE a.ds = '2008-08-15';
SELECT a.foo FROM invites a LIMIT 3;

-- EXTENDED shows storage location, serde, and stats for one partition.
DESCRIBE EXTENDED page_view PARTITION (ds='2008-08-08');
-- Append to an existing table/partition (no OVERWRITE keeps prior data).
-- LOCAL reads from the client machine's filesystem and copies into HDFS.
LOAD DATA LOCAL INPATH '/tmp/pv_2008-06-08_us.txt' INTO TABLE c02 PARTITION (date='2008-06-08', country='US');
LOAD DATA LOCAL INPATH './examples/files/kvl.txt' INTO TABLE pokes;

-- Overwrite: replaces the partition's existing contents.
-- (Original comment had the typo "overwirte".)
LOAD DATA INPATH '/user/admin/SqlldrDat/CnClickstat/20101101/19/clickstat_gp_fatdt0/0'
OVERWRITE INTO TABLE c02_clickstat_fatdtl PARTITION (dt='20101101');

-- Two equivalent INSERT...SELECT spellings: Hive's FROM-first form
-- and the conventional SELECT-first form.
hive> FROM invites a INSERT OVERWRITE TABLE events SELECT a.bar, count(*)
      WHERE a.foo > 0 GROUP BY a.bar;
hive> INSERT OVERWRITE TABLE events SELECT a.bar, count(*) FROM invites a
      WHERE a.foo > 0 GROUP BY a.bar;
-- Join forms. Hive supports only equi-joins in ON.
SELECT a.* FROM a JOIN b ON (a.id = b.id);
SELECT a.* FROM a JOIN b ON (a.id = b.id AND a.department = b.department);

-- Multi-way join: b is the pivot, joined to a and c on different keys.
SELECT a.val, b.val, c.val FROM a JOIN b ON (a.key = b.key1) JOIN c ON (c.key = b.key2);

-- PITFALL: putting the partition filter in WHERE after an outer join.
-- Rows of a with no match in b have b.ds = NULL, so "b.ds = '2009-07-07'"
-- discards them — the LEFT OUTER JOIN silently degrades to an inner join.
-- (Original line was garbled: "FROM OUTER JOIN b" with no left table.)
SELECT a.val, b.val
FROM a LEFT OUTER JOIN b ON (a.key = b.key)
WHERE a.ds = '2009-07-07' AND b.ds = '2009-07-07';

-- CORRECT: keep the partition predicates inside ON so unmatched rows of a
-- survive with NULLs for b's columns.
SELECT a.val, b.val FROM a LEFT OUTER JOIN b
ON (a.key = b.key AND b.ds = '2009-07-07' AND a.ds = '2009-07-07');
# ============ hive CLI flags ============
# -i <filename>              run initialization SQL from a file first
# -e 'quoted query string'   run SQL given on the command line
# -f <filename>              run SQL from a file
hive -e "use ku; show tables"        # list all tables in database "ku"
hive -e "show databases"             # list all databases
show tables like '*name*'            # tables whose name contains "name"
desc formatted table_name            # full table metadata (location, serde, stats)
# e.g. tables under the lbs_lifedata_ / lbs_lifebase_s prefixes:
desc formatted lbs_lifedata_lbs_batch_plan

# -S (silent) suppresses the progress/log chatter, leaving only results.
hive -S -e "select * FROM mytable LIMIT 3"

less -SN table.desc                  # view the table description without line wrapping

# HDFS housekeeping (-lsr/-dus are the old recursive-list / dir-usage forms).
hadoop fs -lsr /apps/hduser4899
hadoop fs -dus /apps/hduser4899/heater/heater_agraph
hadoop fs -get /apps/hduser4899/heater/heater_idno_name/* ./

# Dump the first 1000 rows of a table (passed as $1) to a gzipped file.
# Capture the positional arg once and reuse the named, quoted variable.
table=$1
hive -e "select * from $table limit 1000" | gzip > "$table.1000.gz"
复制代码
扫码加我 拉你入群
请注明:姓名-公司-职位
以便审核进群资格,未注明则拒绝
|
|
-
总评分: 经验 + 20
论坛币 + 20
查看全部评分
|