1. 程式人生 > storm.yaml配置注意事項

storm.yaml配置注意事項

storm.yaml配置完後,啟動storm報錯: [Hadoop@slq storm-0.9.1]$ bin/storm nimbus & [2] 15949 [hadoop@slq storm-0.9.1]$ Exception in thread "main" expected '<document start>', but found BlockMappingStart in 'reader', line 24, column 1:     nimbus.host: "192.168.1.101"
    ^         at org.yaml.snakeyaml.parser.ParserImpl$ParseDocumentStart.produce(ParserImpl.Java:225)         at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)        
at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:143)         at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:108)         at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:120)
        at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:481)         at org.yaml.snakeyaml.Yaml.load(Yaml.java:424)         at backtype.storm.utils.Utils.findAndReadConfigFile(Utils.java:138)         at backtype.storm.utils.Utils.readStormConfig(Utils.java:178)         at backtype.storm.config$read_storm_config.invoke(config.clj:116)         at backtype.storm.command.config_value$_main.invoke(config_value.clj:22)         at clojure.lang.AFn.applyToHelper(AFn.java:161)         at clojure.lang.AFn.applyTo(AFn.java:151)         at backtype.storm.command.config_value.main(Unknown Source) Exception in thread "main" expected '<document start>', but found BlockMappingStart in 'reader', line 24, column 1:     nimbus.host: "192.168.1.101"     ^         at org.yaml.snakeyaml.parser.ParserImpl$ParseDocumentStart.produce(ParserImpl.java:225)         at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)         at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:143)         at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:108)         at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:120)         at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:481)         at org.yaml.snakeyaml.Yaml.load(Yaml.java:424)         at backtype.storm.utils.Utils.findAndReadConfigFile(Utils.java:138)         at backtype.storm.utils.Utils.readStormConfig(Utils.java:178)         at backtype.storm.config$read_storm_config.invoke(config.clj:116)         at backtype.storm.command.config_value$_main.invoke(config_value.clj:22)         at clojure.lang.AFn.applyToHelper(AFn.java:161)         at clojure.lang.AFn.applyTo(AFn.java:151)         at backtype.storm.command.config_value.main(Unknown Source)

Running: java -server -Dstorm.options= -Dstorm.home=/home/hadoop/storm-0.9.1 -Djava.library.path= -Dstorm.conf.file= -cp /home/hadoop/storm-0.9.1/lib/storm-core-0.9.1-incubating.jar:/home/hadoop/storm-0.9.1/lib/clojure-1.4.0.jar:/home/hadoop/storm-0.9.1/lib/clj-time-0.4.1.jar:/home/hadoop/storm-0.9.1/lib/joda-time-2.0.jar:/home/hadoop/storm-0.9.1/lib/compojure-1.1.3.jar:/home/hadoop/storm-0.9.1/lib/core.incubator-0.1.0.jar:/home/hadoop/storm-0.9.1/lib/tools.macro-0.1.0.jar:/home/hadoop/storm-0.9.1/lib/clout-1.0.1.jar:/home/hadoop/storm-0.9.1/lib/ring-core-1.1.5.jar:/home/hadoop/storm-0.9.1/lib/commons-codec-1.4.jar:/home/hadoop/storm-0.9.1/lib/commons-io-1.4.jar:/home/hadoop/storm-0.9.1/lib/commons-fileupload-1.2.1.jar:/home/hadoop/storm-0.9.1/lib/servlet-api-2.5.jar:/home/hadoop/storm-0.9.1/lib/hiccup-0.3.6.jar:/home/hadoop/storm-0.9.1/lib/ring-devel-0.3.11.jar:/home/hadoop/storm-0.9.1/lib/clj-stacktrace-0.2.4.jar:/home/hadoop/storm-0.9.1/lib/ring-jetty-adapter-0.3.11.jar:/home/hadoop/storm-0.9.1/lib/ring-servlet-0.3.11.jar:/home/hadoop/storm-0.9.1/lib/jetty-6.1.26.jar:/home/hadoop/storm-0.9.1/lib/jetty-util-6.1.26.jar:/home/hadoop/storm-0.9.1/lib/servlet-api-2.5-20081211.jar:/home/hadoop/storm-0.9.1/lib/tools.logging-0.2.3.jar:/home/hadoop/storm-0.9.1/lib/math.numeric-tower-0.0.1.jar:/home/hadoop/storm-0.9.1/lib/tools.cli-0.2.2.jar:/home/hadoop/storm-0.9.1/lib/commons-exec-1.1.jar:/home/hadoop/storm-0.9.1/lib/commons-lang-2.5.jar:/home/hadoop/storm-0.9.1/lib/curator-framework-1.0.1.jar:/home/hadoop/storm-0.9.1/lib/curator-client-1.0.1.jar:/home/hadoop/storm-0.9.1/lib/slf4j-api-1.6.5.jar:/home/hadoop/storm-0.9.1/lib/zookeeper-3.3.3.jar:/home/hadoop/storm-0.9.1/lib/jline-2.11.jar:/home/hadoop/storm-0.9.1/lib/junit-3.8.1.jar:/home/hadoop/storm-0.9.1/lib/guava-13.0.jar:/home/hadoop/storm-0.9.1/lib/json-simple-1.1.jar:/home/hadoop/storm-0.9.1/lib/carbonite-1.3.2.jar:/home/hadoop/storm-0.9.1/lib/kryo-2.17.jar:/home/hadoop/storm-0.9.1/lib/reflectasm-1.07-shaded.jar:/home
/hadoop/storm-0.9.1/lib/asm-4.0.jar:/home/hadoop/storm-0.9.1/lib/minlog-1.2.jar:/home/hadoop/storm-0.9.1/lib/objenesis-1.2.jar:/home/hadoop/storm-0.9.1/lib/meat-locker-0.3.1.jar:/home/hadoop/storm-0.9.1/lib/snakeyaml-1.11.jar:/home/hadoop/storm-0.9.1/lib/httpclient-4.1.1.jar:/home/hadoop/storm-0.9.1/lib/httpcore-4.1.jar:/home/hadoop/storm-0.9.1/lib/commons-logging-1.1.1.jar:/home/hadoop/storm-0.9.1/lib/disruptor-2.10.1.jar:/home/hadoop/storm-0.9.1/lib/jgrapht-core-0.9.0.jar:/home/hadoop/storm-0.9.1/lib/logback-classic-1.0.6.jar:/home/hadoop/storm-0.9.1/lib/logback-core-1.0.6.jar:/home/hadoop/storm-0.9.1/lib/log4j-over-slf4j-1.6.6.jar:/home/hadoop/storm-0.9.1/lib/netty-3.6.3.Final.jar:/home/hadoop/storm-0.9.1/conf -Dlogfile.name=nimbus.log

我的storm.yaml配置檔案如下:

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

########### These MUST be filled in for a storm configuration
 storm.zookeeper.servers:
     - "192.168.241.129:2181"
# 
 nimbus.seeds: ["192.168.241.129"]
# 
# 
# ##### These may optionally be filled in:
#    
## List of custom serializations
# topology.kryo.register:
#     - org.mycompany.MyType
#     - org.mycompany.MyType2: org.mycompany.MyType2Serializer
#
## List of custom kryo decorators
# topology.kryo.decorators:
#     - org.mycompany.MyDecorator
#
## Locations of the drpc servers
# drpc.servers:
#     - "server1"
#     - "server2"
 supervisor.slots.ports:
        - 6700
        - 6701
## Metrics Consumers
# topology.metrics.consumer.register:
#   - class: "org.apache.storm.metric.LoggingMetricsConsumer"
#     parallelism.hint: 1
#   - class: "org.mycompany.MyMetricsConsumer"
#     parallelism.hint: 1
#     argument:
#       - endpoint: "metrics-collector.mycompany.org"
 storm.local.dir: "/usr/local/share/storm/apache-storm-1.0.5/data"

報上面這個錯的根本原因,是我在配置 supervisor.slots.ports: 的時候,該行行首沒有加空格。坑死了!

大家配置storm.yaml時一定要注意了。少一個空格就會啟動不了。

即每一行配置項行首都要加一個空格,例如:

 nimbus.host: "192.168.1.101"
 storm.zookeeper.port: 2181
 storm.local.dir: "home/hadoop/storm-0.9.1/data"
 supervisor.slots.ports: