# Default system properties included when running spark-submit.
# This is useful for setting default environment settings.
# Example:
# spark.master spark://master:7077
# spark.eventLog.enabled true
# spark.eventLog.dir hdfs://namenode:8021/directory
# spark.serializer org.apache.spark.serializer.KryoSerializer
# spark.driver.memory 5g
# spark.executor.extraJavaOptions -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three"
#spark.eventLog.dir=hdfs://10.211.55.101/user/spark/applicationHistory
#spark.eventLog.dir hdfs://node1:8021/user/spark/applicationHistory
spark.eventLog.dir hdfs://node1/user/spark/applicationHistory
#spark.eventLog.dir hdfs:///user/spark/applicationHistory
spark.yarn.historyServer.address=10.211.55.101:18080
spark.eventLog.enabled=true
spark.yarn.archive hdfs://node1/user/spark/spark-libs.jar
## --- START GS TEAM INSERT --- ##
# Configuration and authentication settings for the IAM role used to access
# s3a data under the commoncrawl policy.
# Replace the placeholder access and secret keys below:
spark.hadoop.fs.s3a.impl org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.access.key TYPE_AWS_IAM_ROLE_ACCESSKEY_HERE
spark.hadoop.fs.s3a.secret.key TYPE_AWS_IAM_ROLE_SECRETKEY_HERE
## --- END GS TEAM INSERT --- ##