-
Notifications
You must be signed in to change notification settings - Fork 7
feat: use SLF4J with Spark default backend for logging #69
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: master
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -76,7 +76,9 @@ import org.apache.spark.deploy.k8s.features.{ | |||||||||||||||
| KubernetesFeatureConfigStep | ||||||||||||||||
| } | ||||||||||||||||
| import org.apache.spark.scheduler.cluster.SchedulerBackendUtils | ||||||||||||||||
| import org.apache.spark.scheduler.cluster.k8s.KubernetesExecutorBuilder | ||||||||||||||||
| import org.apache.spark.util.Utils | ||||||||||||||||
| import org.slf4j.{Logger, LoggerFactory} | ||||||||||||||||
|
Comment on lines
78
to
+81
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
Suggested change
|
||||||||||||||||
|
|
||||||||||||||||
| import scala.collection.mutable | ||||||||||||||||
| import scala.jdk.CollectionConverters._ | ||||||||||||||||
|
|
@@ -145,18 +147,12 @@ private[spark] object ArmadaClientApplication { | |||||||||||||||
| private val DEFAULT_NAMESPACE = "default" | ||||||||||||||||
| private val DEFAULT_ARMADA_APP_ID = "armada-spark-app-id" | ||||||||||||||||
| private val DEFAULT_RUN_AS_USER = 185 | ||||||||||||||||
|
|
||||||||||||||||
| } | ||||||||||||||||
|
|
||||||||||||||||
| /** Main class and entry point of application submission in KUBERNETES mode. | ||||||||||||||||
| */ | ||||||||||||||||
| private[spark] class ArmadaClientApplication extends SparkApplication { | ||||||||||||||||
| // FIXME: Find the real way to log properly. | ||||||||||||||||
| private def log(msg: String): Unit = { | ||||||||||||||||
| // scalastyle:off println | ||||||||||||||||
| System.err.println(msg) | ||||||||||||||||
| // scalastyle:on println | ||||||||||||||||
| } | ||||||||||||||||
| private val logger = LoggerFactory.getLogger(getClass) | ||||||||||||||||
|
|
||||||||||||||||
| override def start(args: Array[String], conf: SparkConf): Unit = { | ||||||||||||||||
| val parsedArguments = ClientArguments.fromCommandLineArgs(args) | ||||||||||||||||
|
|
@@ -170,17 +166,17 @@ private[spark] class ArmadaClientApplication extends SparkApplication { | |||||||||||||||
| val armadaJobConfig = validateArmadaJobConfig(sparkConf, clientArguments) | ||||||||||||||||
|
|
||||||||||||||||
| val (host, port) = ArmadaUtils.parseMasterUrl(sparkConf.get("spark.master")) | ||||||||||||||||
| log(s"Connecting to Armada Server - host: $host, port: $port") | ||||||||||||||||
| logger.info(s"Connecting to Armada Server - host: $host, port: $port") | ||||||||||||||||
|
|
||||||||||||||||
| val armadaClient = ArmadaClient(host, port, useSsl = false, sparkConf.get(ARMADA_AUTH_TOKEN)) | ||||||||||||||||
| val healthTimeout = | ||||||||||||||||
| Duration(sparkConf.get(ARMADA_HEALTH_CHECK_TIMEOUT), SECONDS) | ||||||||||||||||
|
|
||||||||||||||||
| log(s"Checking Armada Server health (timeout: $healthTimeout)") | ||||||||||||||||
| logger.info(s"Checking Armada Server health (timeout: $healthTimeout)") | ||||||||||||||||
| val healthResp = Await.result(armadaClient.submitHealth(), healthTimeout) | ||||||||||||||||
|
|
||||||||||||||||
| if (healthResp.status.isServing) { | ||||||||||||||||
| log("Armada Server is serving requests!") | ||||||||||||||||
| logger.info("Armada Server is serving requests!") | ||||||||||||||||
| } else { | ||||||||||||||||
| throw new RuntimeException( | ||||||||||||||||
| "Armada health check failed - Armada Server is not serving requests!" | ||||||||||||||||
|
|
@@ -196,7 +192,7 @@ private[spark] class ArmadaClientApplication extends SparkApplication { | |||||||||||||||
| val lookoutURL = | ||||||||||||||||
| s"$lookoutBaseURL/?page=0&sort[id]=jobId&sort[desc]=true&" + | ||||||||||||||||
| s"ps=50&sb=$driverJobId&active=false&refresh=true" | ||||||||||||||||
| log(s"Lookout URL for the driver job is $lookoutURL") | ||||||||||||||||
| logger.info(s"Lookout URL for the driver job is $lookoutURL") | ||||||||||||||||
|
|
||||||||||||||||
| () | ||||||||||||||||
| } | ||||||||||||||||
|
|
@@ -745,7 +741,7 @@ private[spark] class ArmadaClientApplication extends SparkApplication { | |||||||||||||||
| val error = Some(driverResponse.jobResponseItems.head.error) | ||||||||||||||||
| .filter(_.nonEmpty) | ||||||||||||||||
| .getOrElse("none") | ||||||||||||||||
| log( | ||||||||||||||||
| logger.info( | ||||||||||||||||
| s"Submitted driver job with ID: $driverJobId, Error: $error" | ||||||||||||||||
| ) | ||||||||||||||||
| driverJobId | ||||||||||||||||
|
|
@@ -760,7 +756,7 @@ private[spark] class ArmadaClientApplication extends SparkApplication { | |||||||||||||||
| val executorsResponse = armadaClient.submitJobs(queue, jobSetId, executors) | ||||||||||||||||
| executorsResponse.jobResponseItems.map { item => | ||||||||||||||||
| val error = Some(item.error).filter(_.nonEmpty).getOrElse("none") | ||||||||||||||||
| log(s"Submitted executor job with ID: ${item.jobId}, Error: $error") | ||||||||||||||||
| logger.info(s"Submitted executor job with ID: ${item.jobId}, Error: $error") | ||||||||||||||||
| item.jobId | ||||||||||||||||
| } | ||||||||||||||||
| } | ||||||||||||||||
|
|
||||||||||||||||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,7 @@ | ||
| rootLogger.level = debug | ||
| rootLogger.appenderRef.console.ref = console | ||
|
|
||
| appender.console.type = Console | ||
| appender.console.name = console | ||
| appender.console.layout.type = PatternLayout | ||
| appender.console.layout.pattern = %d{HH:mm:ss.SSS} %-5level %logger{20} - %msg%n | ||
|
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. |
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
spark-core). Declaring `slf4j-api` at the default `compile` scope is unnecessary — using `provided` makes the intent explicit and mirrors how the other Spark dependencies are declared, while still being available for compilation.