Code
from pyspark.conf import SparkConf
from pyspark.context import SparkContext

# First run: only the app name and master URL are set explicitly.
conf = SparkConf().setAll([('spark.app.name', '2_test_sparkconf'),
                           ('spark.master', 'spark://spark-master:17077')])
sc = SparkContext(conf=conf)

print('first')
for setting in sc._conf.getAll():
    print(setting)
sc.stop()

# Second run: driver and executor resources are set as well.
conf = SparkConf().setAll([('spark.app.name', '2_test_sparkconf'),
                           ('spark.master', 'spark://spark-master:17077'),
                           ('spark.driver.cores', '1'),
                           ('spark.driver.memory', '1g'),
                           ('spark.executor.memory', '1g'),
                           ('spark.executor.cores', '2'),
                           ('spark.cores.max', '2')])
sc = SparkContext(conf=conf)

print('second')
for setting in sc._conf.getAll():
    print(setting)
sc.stop()
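As an aside, the same configuration can also be built by chaining the dedicated setters instead of passing a list to setAll(). A minimal sketch of the second configuration above:

# Equivalent to the second setAll() call, using chained setters.
conf = (SparkConf()
        .setAppName('2_test_sparkconf')
        .setMaster('spark://spark-master:17077')
        .set('spark.driver.cores', '1')
        .set('spark.driver.memory', '1g')
        .set('spark.executor.memory', '1g')
        .set('spark.executor.cores', '2')
        .set('spark.cores.max', '2'))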
Result
first
('spark.master', 'spark://spark-master:17077')
('spark.rdd.compress', 'True')
('spark.driver.host', 'spark-client')
('spark.app.name', '2_test_sparkconf')
('spark.serializer.objectStreamReset', '100')
('spark.driver.port', '39345')
('spark.submit.pyFiles', '')
('spark.executor.id', 'driver')
('spark.submit.deployMode', 'client')
('spark.app.id', 'app-20220604103943-0049')
('spark.app.startTime', '1654339181825')
('spark.ui.showConsoleProgress', 'true')
second
('spark.app.id', 'app-20220604103945-0050')
('spark.master', 'spark://spark-master:17077')
('spark.driver.host', 'spark-client')
('spark.executor.id', 'driver')
('spark.driver.memory', '1g')
('spark.executor.cores', '2')
('spark.app.startTime', '1654339185050')
('spark.driver.port', '44035')
('spark.executor.memory', '1g')
('spark.rdd.compress', 'True')
('spark.app.name', '2_test_sparkconf')
('spark.cores.max', '2')
('spark.serializer.objectStreamReset', '100')
('spark.submit.pyFiles', '')
('spark.submit.deployMode', 'client')
('spark.ui.showConsoleProgress', 'true')
('spark.driver.cores', '1')
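Note that both runs print more than what was passed to setAll(): Spark fills in values of its own (spark.app.id, spark.driver.host, spark.driver.port, spark.app.startTime, and so on), so getAll() shows the effective configuration, not only the explicit one.

Also, sc._conf is a private attribute and may change between PySpark versions. A minimal sketch of the same check through the public API, assuming the same spark-master host and port as above:

from pyspark.conf import SparkConf
from pyspark.context import SparkContext
from pyspark.sql import SparkSession

conf = SparkConf().setAll([('spark.app.name', '2_test_sparkconf'),
                           ('spark.master', 'spark://spark-master:17077')])

# Public API: SparkContext.getConf() returns a copy of the effective
# SparkConf, so there is no need to reach into the private _conf attribute.
sc = SparkContext(conf=conf)
for setting in sc.getConf().getAll():
    print(setting)
sc.stop()

# The same settings are reachable from a SparkSession as well.
spark = SparkSession.builder.config(conf=conf).getOrCreate()
for setting in spark.sparkContext.getConf().getAll():
    print(setting)
spark.stop()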