Sample core-site.xml for ECS S3
The following core-site.xml file is an example of ECS S3 properties.
core-site.xml
<?xml version="1.0" encoding="UTF-8"?>
<configuration xmlns:xi="http://www.w3.org/2001/XInclude">
<property>
<name>fs.azure.user.agent.prefix</name>
<value>User-Agent: APN/1.0 Hortonworks/1.0 HDP/</value>
</property>
<property>
<name>fs.defaultFS</name>
<value>hdfs://<<hostname:8020>></value>
<final>true</final>
</property>
<property>
<name>fs.s3a.access.key</name>
<value><<userid>></value>
</property>
<property>
<name>fs.s3a.acl.default</name>
<value>PublicReadWrite</value>
</property>
<property>
<name>fs.s3a.block.size</name>
<value>32M</value>
</property>
<property>
<name>fs.s3a.bucket.s3aTestBucket.access.key</name>
<value>ambari-qa</value>
</property>
<property>
<name>fs.s3a.bucket.s3aTestBucket.secret.key</name>
<value>secret_key</value>
</property>
<property>
<name>fs.s3a.buffer.dir</name>
<value>${hadoop.tmp.dir}/s3a</value>
</property>
<property>
<name>fs.s3a.connection.establish.timeout</name>
<value>5000</value>
</property>
<property>
<name>fs.s3a.connection.maximum</name>
<value>15</value>
</property>
<property>
<name>fs.s3a.connection.ssl.enabled</name>
<value>false</value>
</property>
<property>
<name>fs.s3a.connection.timeout</name>
<value>200000</value>
</property>
<property>
<name>fs.s3a.endpoint</name>
<value><<Host IP address>></value>
</property>
<property>
<name>fs.s3a.fast.upload</name>
<value>true</value>
</property>
<property>
<name>fs.s3a.fast.upload.buffer</name>
<value>disk</value>
</property>
<property>
<name>fs.s3a.max.total.tasks</name>
<value>5</value>
</property>
<property>
<name>fs.s3a.multiobjectdelete.enable</name>
<value>true</value>
</property>
<property>
<name>fs.s3a.multipart.purge</name>
<value>false</value>
</property>
<property>
<name>fs.s3a.multipart.purge.age</name>
<value>86400</value>
</property>
<property>
<name>fs.s3a.multipart.size</name>
<value>67108864</value>
</property>
<property>
<name>fs.s3a.multipart.threshold</name>
<value>2147483647</value>
</property>
<property>
<name>fs.s3a.paging.maximum</name>
<value>1000</value>
</property>
<property>
<name>fs.s3a.path.style.access</name>
<value>false</value>
</property>
<property>
<name>fs.s3a.readahead.range</name>
<value>64K</value>
</property>
<property>
<name>fs.s3a.secret.key</name>
<value><<secret key>></value>
</property>
<property>
<name>fs.s3a.socket.recv.buffer</name>
<value>8192</value>
</property>
<property>
<name>fs.s3a.socket.send.buffer</name>
<value>8192</value>
</property>
<property>
<name>fs.s3a.threads.keepalivetime</name>
<value>60</value>
</property>
<property>
<name>fs.s3a.threads.max</name>
<value>10</value>
</property>
<property>
<name>fs.s3a.user.agent.prefix</name>
<value>User-Agent: APN/1.0 Hortonworks/1.0 HDP/</value>
</property>
<property>
<name>fs.trash.interval</name>
<value>360</value>
</property>
<property>
<name>ha.failover-controller.active-standby-elector.zk.op.retries</name>
<value>120</value>
</property>
<property>
<name>ha.zookeeper.acl</name>
<value>sasl:nn:rwcda</value>
</property>
<property>
<name>hadoop.http.authentication.cookie.domain</name>
<value>centera.lab.emc.com</value>
</property>
<property>
<name>hadoop.http.authentication.kerberos.keytab</name>
<value>/etc/security/keytabs/spnego.service.keytab</value>
</property>
<property>
<name>hadoop.http.authentication.kerberos.principal</name>
<value>HTTP/_HOST@SM.EMC.COM</value>
</property>
<property>
<name>hadoop.http.authentication.signature.secret.file</name>
<value>/etc/security/http_secret</value>
</property>
<property>
<name>hadoop.http.authentication.simple.anonymous.allowed</name>
<value>false</value>
</property>
<property>
<name>hadoop.http.authentication.type</name>
<value>kerberos</value>
</property>
<property>
<name>hadoop.http.cross-origin.allowed-headers</name>
<value>X-Requested-With,Content-Type,Accept,Origin,WWW-Authenticate,Accept-Encoding,Transfer-Encoding</value>
</property>
<property>
<name>hadoop.http.cross-origin.allowed-methods</name>
<value>GET,PUT,POST,OPTIONS,HEAD,DELETE</value>
</property>
<property>
<name>hadoop.http.cross-origin.allowed-origins</name>
<value>*</value>
</property>
<property>
<name>hadoop.http.cross-origin.max-age</name>
<value>1800</value>
</property>
<property>
<name>hadoop.http.filter.initializers</name>
<value>org.apache.hadoop.security.AuthenticationFilterInitializer,org.apache.hadoop.security.HttpCrossOriginFilterInitializer</value>
</property>
<property>
<name>hadoop.proxyuser.ambari-server.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.ambari-server.hosts</name>
<value><<Ambari server fully qualified name>></value>
</property>
<property>
<name>hadoop.proxyuser.hdfs.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hdfs.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hive.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hive.hosts</name>
<value><<hive host>></value>
</property>
<property>
<name>hadoop.proxyuser.HTTP.groups</name>
<value>users</value>
</property>
<property>
<name>hadoop.proxyuser.knox.groups</name>
<value>users</value>
</property>
<property>
<name>hadoop.proxyuser.knox.hosts</name>
<value><<knox host>></value>
</property>
<property>
<name>hadoop.proxyuser.livy.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.livy.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.yarn.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.yarn.hosts</name>
<value><<yarn host>></value>
</property>
<property>
<name>hadoop.rpc.protection</name>
<value>authentication,privacy</value>
</property>
<property>
<name>hadoop.security.auth_to_local</name>
<value>RULE:[1:$1@$0](ambari-qa@SM.EMC.COM)s/.*/ambari-qa/
RULE:[1:$1@$0](hbase@SM.EMC.COM)s/.*/hbase/
RULE:[1:$1@$0](hdfs@SM.EMC.COM)s/.*/hdfs/
RULE:[1:$1@$0](spark@SM.EMC.COM)s/.*/spark/
RULE:[1:$1@$0](yarn-ats@SM.EMC.COM)s/.*/yarn-ats/
RULE:[1:$1@$0](.*@SM.EMC.COM)s/@.*//
RULE:[2:$1@$0](activity_analyzer@SM.EMC.COM)s/.*/activity_analyzer/
RULE:[2:$1@$0](activity_explorer@SM.EMC.COM)s/.*/activity_explorer/
RULE:[2:$1@$0](amshbase@SM.EMC.COM)s/.*/ams/
RULE:[2:$1@$0](amsmon@SM.EMC.COM)s/.*/ams/
RULE:[2:$1@$0](amszk@SM.EMC.COM)s/.*/ams/
RULE:[2:$1@$0](atlas@SM.EMC.COM)s/.*/atlas/
RULE:[2:$1@$0](dn@SM.EMC.COM)s/.*/hdfs/
RULE:[2:$1@$0](hbase@SM.EMC.COM)s/.*/hbase/
RULE:[2:$1@$0](hive@SM.EMC.COM)s/.*/hive/
RULE:[2:$1@$0](jhs@SM.EMC.COM)s/.*/mapred/
RULE:[2:$1@$0](knox@SM.EMC.COM)s/.*/knox/
RULE:[2:$1@$0](nm@SM.EMC.COM)s/.*/yarn/
RULE:[2:$1@$0](nn@SM.EMC.COM)s/.*/hdfs/
RULE:[2:$1@$0](rm@SM.EMC.COM)s/.*/yarn/
RULE:[2:$1@$0](spark@SM.EMC.COM)s/.*/spark/
RULE:[2:$1@$0](yarn@SM.EMC.COM)s/.*/yarn/
RULE:[2:$1@$0](yarn-ats-hbase@SM.EMC.COM)s/.*/yarn-ats/
DEFAULT</value>
</property>
<property>
<name>hadoop.security.authentication</name>
<value>kerberos</value>
</property>
<property>
<name>hadoop.security.authorization</name>
<value>true</value>
</property>
<property>
<name>hadoop.security.instrumentation.requires.admin</name>
<value>false</value>
</property>
<property>
<name>io.compression.codecs</name>
<value>org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
</property>
<property>
<name>io.file.buffer.size</name>
<value>131072</value>
</property>
<property>
<name>io.serializations</name>
<value>org.apache.hadoop.io.serializer.WritableSerialization</value>
</property>
<property>
<name>ipc.client.connect.max.retries</name>
<value>50</value>
</property>
<property>
<name>ipc.client.connection.maxidletime</name>
<value>30000</value>
</property>
<property>
<name>ipc.client.idlethreshold</name>
<value>8000</value>
</property>
<property>
<name>ipc.server.tcpnodelay</name>
<value>true</value>
</property>
<property>
<name>mapreduce.jobtracker.webinterface.trusted</name>
<value>false</value>
</property>
<property>
<name>net.topology.script.file.name</name>
<value>/etc/hadoop/conf/topology_script.py</value>
</property>
</configuration>