
How to Fix Chinese Garbled Characters When Creating Hive Tables

小億
2024-05-29 09:54:22
Category: Big Data

When you run into garbled Chinese characters in Hive, it is usually because the encoding in use is not UTF-8, so Chinese characters cannot be stored or displayed correctly. Possible fixes are listed below:

  1. Specify UTF-8 as the character set when creating the table:
CREATE TABLE table_name (
    column1 STRING,
    column2 INT
)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
STORED AS TEXTFILE
TBLPROPERTIES ("charset"="utf8");
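Note that the TBLPROPERTIES entry above only records metadata; if the underlying data files are themselves in a non-UTF-8 encoding (a GBK export, for example), the text SerDe must be told how to decode them. A minimal sketch using LazySimpleSerDe's serialization.encoding property (available in Hive 0.14+; the table name and the GBK encoding here are assumptions for illustration):
-- Decode comma-delimited source files as GBK instead of UTF-8
CREATE TABLE gbk_source_table (
    column1 STRING,
    column2 INT
)
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
WITH SERDEPROPERTIES (
    'field.delim' = ',',
    'serialization.encoding' = 'GBK'
)
STORED AS TEXTFILE;
-- The same property can also be set on an existing table:
ALTER TABLE gbk_source_table SET SERDEPROPERTIES ('serialization.encoding' = 'GBK');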
  2. Set the default character set to UTF-8 in the Hive configuration file hive-site.xml:
<property>
  <name>hive.default.charset</name>
  <value>utf8</value>
</property>
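If the garbled text appears in table or column comments rather than in the data itself, the usual culprit is the metastore database. With a MySQL-backed metastore whose default charset is not UTF-8, a commonly used fix is to switch the comment-related columns to utf8; the statements below are a sketch assuming the stock MySQL metastore schema in a database named hive (column types should match your installed schema version):
-- Run against the MySQL database backing the Hive metastore
USE hive;
ALTER TABLE COLUMNS_V2 MODIFY COLUMN COMMENT VARCHAR(256) CHARACTER SET utf8;
ALTER TABLE TABLE_PARAMS MODIFY COLUMN PARAM_VALUE VARCHAR(4000) CHARACTER SET utf8;
ALTER TABLE PARTITION_PARAMS MODIFY COLUMN PARAM_VALUE VARCHAR(4000) CHARACTER SET utf8;
ALTER TABLE PARTITION_KEYS MODIFY COLUMN PKEY_COMMENT VARCHAR(4000) CHARACTER SET utf8;
On the Hive side, appending useUnicode=true&characterEncoding=UTF-8 to the javax.jdo.option.ConnectionURL value in hive-site.xml makes the metastore client exchange UTF-8 with MySQL.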
  3. Adjust the Hive session's output settings:
SET hive.cli.print.header=true;
SET hive.cli.print.current.db=true;
SET hive.resultset.use.unique.column.names=false;
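After the table, the configuration, and the session are set up, a quick round trip confirms the fix. A minimal check (assuming Hive 0.14+ for INSERT ... VALUES; the table name test_zh is a placeholder):
-- Create a table with a Chinese column comment, write a Chinese row, and read both back
CREATE TABLE test_zh (msg STRING COMMENT '中文註釋測試');
INSERT INTO TABLE test_zh VALUES ('你好,世界');
SELECT msg FROM test_zh;
DESCRIBE test_zh;
If both the selected value and the column comment come back intact, the encodings are consistent end to end.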
