
Various errors when importing MySQL data into HDFS with Sqoop

Error output:

18/06/29 10:45:08 ERROR sqoop.Sqoop: Got exception running Sqoop: java.lang.RuntimeException: java.lang.RuntimeException: java.sql.SQLException: The connection property 'zeroDateTimeBehavior' acceptable values are: 'CONVERT_TO_NULL', 'EXCEPTION' or 'ROUND'. The value 'convertToNull' is not acceptable.
java.lang.RuntimeException: java.lang.RuntimeException: java.sql.SQLException: The connection property 'zeroDateTimeBehavior' acceptable values are: 'CONVERT_TO_NULL', 'EXCEPTION' or 'ROUND'. The value 'convertToNull' is not acceptable.
    at org.apache.sqoop.mapreduce.db.DBInputFormat.setDbConf(DBInputFormat.java:170)
    at org.apache.sqoop.mapreduce.db.DBInputFormat.setConf(DBInputFormat.java:161)
    at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
    at org.apache.hadoop.mapreduce.JobSubmitter.writeNewSplits(JobSubmitter.java:299)
    at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:318)
    at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:196)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1290)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1287)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1758)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:1287)
    at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1308)
    at org.apache.sqoop.mapreduce.ImportJobBase.doSubmitJob(ImportJobBase.java:200)
    at org.apache.sqoop.mapreduce.ImportJobBase.runJob(ImportJobBase.java:173)
    at org.apache.sqoop.mapreduce.ImportJobBase.runImport(ImportJobBase.java:270)
    at org.apache.sqoop.manager.SqlManager.importTable(SqlManager.java:692)
    at org.apache.sqoop.manager.MySQLManager.importTable(MySQLManager.java:127)
    at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:520)
    at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:628)
    at org.apache.sqoop.Sqoop.run(Sqoop.java:147)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
    at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:243)
    at org.apache.sqoop.Sqoop.main(Sqoop.java:252)
Caused by: java.lang.RuntimeException: java.sql.SQLException: The connection property 'zeroDateTimeBehavior' acceptable values are: 'CONVERT_TO_NULL', 'EXCEPTION' or 'ROUND'. The value 'convertToNull' is not acceptable.
    at org.apache.sqoop.mapreduce.db.DBInputFormat.getConnection(DBInputFormat.java:223)
    at org.apache.sqoop.mapreduce.db.DBInputFormat.setDbConf(DBInputFormat.java:168)
    ... 26 more
Caused by: java.sql.SQLException: The connection property 'zeroDateTimeBehavior' acceptable values are: 'CONVERT_TO_NULL', 'EXCEPTION' or 'ROUND'. The value 'convertToNull' is not acceptable.
    at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:127)
    at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:95)
    at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:87)
    at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:61)
    at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:71)
    at com.mysql.cj.jdbc.exceptions.SQLExceptionsMapping.translateException(SQLExceptionsMapping.java:85)
    at com.mysql.cj.jdbc.ConnectionImpl.<init>(ConnectionImpl.java:440)
    at com.mysql.cj.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:230)
    at com.mysql.cj.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:226)
    at java.sql.DriverManager.getConnection(DriverManager.java:664)
    at java.sql.DriverManager.getConnection(DriverManager.java:247)
    at org.apache.sqoop.mapreduce.db.DBConfiguration.getConnection(DBConfiguration.java:302)
    at org.apache.sqoop.mapreduce.db.DBInputFormat.getConnection(DBInputFormat.java:216)
    ... 27 more
Caused by: com.mysql.cj.exceptions.WrongArgumentException: The connection property 'zeroDateTimeBehavior' acceptable values are: 'CONVERT_TO_NULL', 'EXCEPTION' or 'ROUND'. The value 'convertToNull' is not acceptable.
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:59)
    at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:103)
    at com.mysql.cj.conf.DefaultPropertySet.initializeProperties(DefaultPropertySet.java:194)
    at com.mysql.cj.jdbc.ConnectionImpl.<init>(ConnectionImpl.java:382)
    ... 33 more
Caused by: com.mysql.cj.exceptions.CJException: The connection property 'zeroDateTimeBehavior' acceptable values are: 'CONVERT_TO_NULL', 'EXCEPTION' or 'ROUND'. The value 'convertToNull' is not acceptable.
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:59)
    at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:103)
    at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:149)
    at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:122)
    at com.mysql.cj.conf.EnumPropertyDefinition.parseObject(EnumPropertyDefinition.java:64)
    at com.mysql.cj.conf.EnumPropertyDefinition.parseObject(EnumPropertyDefinition.java:39)
    at com.mysql.cj.conf.AbstractRuntimeProperty.setFromString(AbstractRuntimeProperty.java:96)
    at com.mysql.cj.conf.AbstractRuntimeProperty.initializeFrom(AbstractRuntimeProperty.java:91)
    at com.mysql.cj.conf.AbstractRuntimeProperty.initializeFrom(AbstractRuntimeProperty.java:75)
    at com.mysql.cj.conf.DefaultPropertySet.initializeProperties(DefaultPropertySet.java:191)
    ... 34 more
Caused by: java.lang.IllegalArgumentException: No enum constant com.mysql.cj.conf.PropertyDefinitions.ZeroDatetimeBehavior.CONVERTTONULL
    at java.lang.Enum.valueOf(Enum.java:238)
    at com.mysql.cj.conf.EnumPropertyDefinition.parseObject(EnumPropertyDefinition.java:62)
    ... 39 more

The original command was:

sqoop import --connect jdbc:mysql://localhost:3306/FileManager  --username root --password #### --table user 

It imports the user table from the FileManager database into HDFS.

The fix is to set zeroDateTimeBehavior explicitly in the JDBC connection URL. When the property is absent, Sqoop injects the legacy value convertToNull itself (the "Setting zero DATETIME behavior to convertToNull" line in the logs further below shows this), but MySQL Connector/J 8.x accepts only CONVERT_TO_NULL, EXCEPTION or ROUND. Append the following to the URL:

?zeroDateTimeBehavior=EXCEPTION

The command was therefore changed to:

sqoop import --connect jdbc:mysql://localhost:3306/FileManager?zeroDateTimeBehavior=EXCEPTION --username root --password #### --table user 

 

and the import ran successfully.
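If you need more connection properties than zeroDateTimeBehavior, quote the JDBC URL so the shell does not treat & as a control operator. A minimal sketch (useSSL=false is only an illustrative extra property; CONVERT_TO_NULL is another value the driver accepts):

sqoop import \
  --connect "jdbc:mysql://localhost:3306/FileManager?zeroDateTimeBehavior=CONVERT_TO_NULL&useSSL=false" \
  --username root --password #### --table user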

 

Problem 1: sqoop import fails with a java.lang.ClassNotFoundException: org.json.JSONObject error

 

 
# sqoop import --connect jdbc:mysql://10.1.32.8:3306/test --username sqoop --password sqoop --table t1 -m 1
16/06/07 08:48:59 INFO sqoop.Sqoop: Running Sqoop version: 1.4.6-cdh5.7.0
16/06/07 08:48:59 WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.
16/06/07 08:48:59 INFO manager.MySQLManager: Preparing to use a MySQL streaming resultset.
16/06/07 08:48:59 INFO tool.CodeGenTool: Beginning code generation
16/06/07 08:48:59 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM `t1` AS t LIMIT 1
16/06/07 08:48:59 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM `t1` AS t LIMIT 1
16/06/07 08:48:59 INFO orm.CompilationManager: HADOOP_MAPRED_HOME is /opt/cloudera/parcels/CDH/lib/hadoop-mapreduce
Note: /tmp/sqoop-root/compile/07751371c513f90a6377d7b482c4a910/t1.java uses or overrides a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
16/06/07 08:49:01 INFO orm.CompilationManager: Writing jar file: /tmp/sqoop-root/compile/07751371c513f90a6377d7b482c4a910/t1.jar
16/06/07 08:49:01 WARN manager.MySQLManager: It looks like you are importing from mysql.
16/06/07 08:49:01 WARN manager.MySQLManager: This transfer can be faster! Use the --direct
16/06/07 08:49:01 WARN manager.MySQLManager: option to exercise a MySQL-specific fast path.
16/06/07 08:49:01 INFO manager.MySQLManager: Setting zero DATETIME behavior to convertToNull (mysql)
16/06/07 08:49:01 INFO mapreduce.ImportJobBase: Beginning import of t1
Exception in thread "main" java.lang.NoClassDefFoundError: org/json/JSONObject
    at org.apache.sqoop.util.SqoopJsonUtil.getJsonStringforMap(SqoopJsonUtil.java:42)
    at org.apache.sqoop.SqoopOptions.writeProperties(SqoopOptions.java:742)
    at org.apache.sqoop.mapreduce.JobBase.putSqoopOptionsToConfiguration(JobBase.java:369)
    at org.apache.sqoop.mapreduce.JobBase.createJob(JobBase.java:355)
    at org.apache.sqoop.mapreduce.ImportJobBase.runImport(ImportJobBase.java:249)
    at org.apache.sqoop.manager.SqlManager.importTable(SqlManager.java:692)
    at org.apache.sqoop.manager.MySQLManager.importTable(MySQLManager.java:118)
    at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:497)
    at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:605)
    at org.apache.sqoop.Sqoop.run(Sqoop.java:143)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
    at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:179)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:218)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:227)
    at org.apache.sqoop.Sqoop.main(Sqoop.java:236)
Caused by: java.lang.ClassNotFoundException: org.json.JSONObject
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    ... 15 more

 

 

Solution:

Sqoop is missing the java-json.jar package, as explained in the following Stack Overflow thread:

http://stackoverflow.com/questions/27504508/java-lang-noclassdeffounderror-org-json-jsonobject

 

The exception itself says it all: java.lang.ClassNotFoundException: org.json.JSONObject.

You have not added the necessary jar file, which contains the org.json.JSONObject class, to your classpath.

 

Download the java-json.jar package:

http://www.java2s.com/Code/Jar/j/Downloadjavajsonjar.htm

 

Copy java-json.jar into the ../sqoop/lib directory:

# cp java-json.jar /opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45/lib/sqoop/lib
# pwd
/opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45/lib/sqoop/lib
# ll java-json.jar
-rw-r--r-- 1 root root 84697 Oct 16  2013 java-json.jar
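Before rerunning the import, a quick sanity check (not from the original post) confirms the class is really inside the jar; it should print a line such as org/json/JSONObject.class:

# jar tf java-json.jar | grep JSONObject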

 


Problem 2: the root user cannot write HDFS files: "Permission denied: user=root"

 

 
# sqoop import --connect jdbc:mysql://10.1.32.8:3306/test --username sqoop --password sqoop --table t1 -m 1
16/06/07 08:49:50 INFO sqoop.Sqoop: Running Sqoop version: 1.4.6-cdh5.7.0
16/06/07 08:49:50 WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.
... ...
16/06/07 08:49:52 INFO manager.MySQLManager: Setting zero DATETIME behavior to convertToNull (mysql)
16/06/07 08:49:52 INFO mapreduce.ImportJobBase: Beginning import of t1
16/06/07 08:49:53 INFO Configuration.deprecation: mapred.jar is deprecated. Instead, use mapreduce.job.jar
16/06/07 08:49:53 INFO Configuration.deprecation: mapred.map.tasks is deprecated. Instead, use mapreduce.job.maps
16/06/07 08:49:53 INFO client.RMProxy: Connecting to ResourceManager at hadoop0.hadoop.com/10.1.32.239:8032
16/06/07 08:49:54 WARN security.UserGroupInformation: PriviledgedActionException as:root (auth:SIMPLE) cause:org.apache.hadoop.security.AccessControlException: Permission denied: user=root, access=WRITE, inode="/user":hdfs:supergroup:drwxr-xr-x
    at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:281)
    at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:262)
    at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:242)
    at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:169)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:152)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6590)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6572)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6524)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4322)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4292)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4265)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:867)
    at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:322)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:603)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)

16/06/07 08:49:54 ERROR tool.ImportTool: Encountered IOException running import job: org.apache.hadoop.security.AccessControlException: Permission denied: user=root, access=WRITE, inode="/user":hdfs:supergroup:drwxr-xr-x
    at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:281)
    at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:262)
    at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:242)
    at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:169)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:152)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6590)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6572)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6524)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4322)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4292)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4265)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:867)
    at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:322)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:603)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)

Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=root, access=WRITE, inode="/user":hdfs:supergroup:drwxr-xr-x
    at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:281)
    at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:262)
    at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:242)
    at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:169)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:152)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6590)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6572)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6524)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4322)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4292)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4265)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:867)
    at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:322)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:603)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)

    at org.apache.hadoop.ipc.Client.call(Client.java:1471)
    at org.apache.hadoop.ipc.Client.call(Client.java:1408)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
    at com.sun.proxy.$Proxy15.mkdirs(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:544)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:256)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
    at com.sun.proxy.$Proxy16.mkdirs(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:3082)
    ... 28 more


 

Solution:
The Sqoop import was run as the root user, which has no permission to write into HDFS: as the error shows, the /user directory is owned by hdfs:supergroup with mode drwxr-xr-x.
The CDH installation created an hdfs user, so log in as hdfs and rerun the Sqoop import:
# su - hdfs
$ pwd
/var/lib/hadoop-hdfs
$ ls
t1.java
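Alternatively, you can keep running the import as root by giving root a writable home directory in HDFS. A sketch, run as the hdfs superuser (it assumes the default /user/<username> layout, where Sqoop writes its output):

# sudo -u hdfs hadoop fs -mkdir -p /user/root
# sudo -u hdfs hadoop fs -chown root:root /user/root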

 

Problem 3: other Hadoop nodes cannot connect to MySQL: "is not allowed to connect to this MySQL server"

 

 
$ sqoop import --connect jdbc:mysql://10.1.32.8:3306/test --username sqoop --password sqoop --table t1 -m 1
16/06/07 08:51:52 INFO sqoop.Sqoop: Running Sqoop version: 1.4.6-cdh5.7.0
......
16/06/07 08:52:00 INFO mapreduce.Job: The url to track the job: http://hadoop0.hadoop.com:8088/proxy/application_1464249387420_0001/
16/06/07 08:52:00 INFO mapreduce.Job: Running job: job_1464249387420_0001
16/06/07 08:52:08 INFO mapreduce.Job: Job job_1464249387420_0001 running in uber mode : false
16/06/07 08:52:08 INFO mapreduce.Job: map 0% reduce 0%
16/06/07 08:52:14 INFO mapreduce.Job: Task Id : attempt_1464249387420_0001_m_000000_0, Status : FAILED
Error: java.lang.RuntimeException: java.lang.RuntimeException: java.sql.SQLException: null, message from server: "Host 'hadoop4.hadoop.com' is not allowed to connect to this MySQL server"
    at org.apache.sqoop.mapreduce.db.DBInputFormat.setConf(DBInputFormat.java:167)
    at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:73)
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:749)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
Caused by: java.lang.RuntimeException: java.sql.SQLException: null, message from server: "Host 'hadoop4.hadoop.com' is not allowed to connect to this MySQL server"
    at org.apache.sqoop.mapreduce.db.DBInputFormat.getConnection(DBInputFormat.java:220)
    at org.apache.sqoop.mapreduce.db.DBInputFormat.setConf(DBInputFormat.java:165)
    ... 9 more
Caused by: java.sql.SQLException: null, message from server: "Host 'hadoop4.hadoop.com' is not allowed to connect to this MySQL server"

 

Solution:
The MySQL server refuses connections from that client host. Because the import's map tasks run on arbitrary cluster nodes (here hadoop4.hadoop.com), every node needs access to MySQL; broaden the sqoop user's host-based privileges.
-- Log in to the MySQL server
# mysql -uroot -proot
mysql> grant all privileges on *.* to 'sqoop'@'%' identified by 'sqoop' with grant option;
Query OK, 0 rows affected (0.00 sec)
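Granting all privileges on *.* to every host works, but is much broader than the import needs. A tighter sketch (the test database and the 10.1.32.% subnet are assumptions inferred from the connection string and host addresses above):

mysql> grant all privileges on test.* to 'sqoop'@'10.1.32.%' identified by 'sqoop';
mysql> select host, user from mysql.user where user = 'sqoop';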