#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# Hadoop Cluster on AWS EC2
#

# Read the Configuration Guide for more info:
# http://incubator.apache.org/whirr/configuration-guide.html

# Change the cluster name here
whirr.cluster-name=hadoop

# Change the number of machines in the cluster here
whirr.instance-templates=1 hadoop-namenode+hadoop-jobtracker,5 hadoop-datanode+hadoop-tasktracker

# Uncomment these lines to run CDH
#whirr.hadoop-install-function=install_cdh_hadoop
#whirr.hadoop-configure-function=configure_cdh_hadoop

# For EC2, set the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables.
whirr.provider=aws-ec2
whirr.identity=${env:AWS_ACCESS_KEY_ID}
whirr.credential=${env:AWS_SECRET_ACCESS_KEY}

# The size of the instance to use. See http://aws.amazon.com/ec2/instance-types/
whirr.hardware-id=c1.xlarge
# Ubuntu 10.04 LTS Lucid. See http://alestic.com/
whirr.image-id=us-east-1/ami-da0cf8b3
# If you choose a different location, make sure whirr.image-id is updated too
whirr.location-id=us-east-1

# By default, the user's system SSH keys are used. Override them here.
# whirr.private-key-file=${sys:user.home}/.ssh/id_rsa
# whirr.public-key-file=${whirr.private-key-file}.pub

# Expert: override Hadoop properties by setting properties with the prefix
# hadoop-common, hadoop-hdfs, or hadoop-mapreduce to set Common, HDFS, and
# MapReduce site properties, respectively. The prefix is removed by Whirr, so
# that, for example, setting
# hadoop-common.fs.trash.interval=1440
# will result in fs.trash.interval being set to 1440 in core-site.xml.
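
# As a further illustration of the prefixes above (the property names are
# standard Hadoop settings; the values are examples only, not recommendations):
# hadoop-hdfs.dfs.replication=3
# would set dfs.replication to 3 in hdfs-site.xml, and
# hadoop-mapreduce.mapred.reduce.tasks=10
# would set mapred.reduce.tasks to 10 in mapred-site.xml.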
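
# Example usage, as a minimal sketch: the commands below assume Whirr is
# installed, that this file is saved as hadoop-ec2.properties (the file name
# and credential placeholders are illustrative), and use the Whirr CLI's
# launch-cluster and destroy-cluster commands with the --config option.
#   export AWS_ACCESS_KEY_ID=<your access key>
#   export AWS_SECRET_ACCESS_KEY=<your secret key>
#   bin/whirr launch-cluster --config hadoop-ec2.properties
# When you are finished with the cluster, shut it down:
#   bin/whirr destroy-cluster --config hadoop-ec2.properties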