Migrate logging to logback

This commit is contained in:
Ryan Harg 2024-07-26 10:25:20 +02:00
parent 6704cbdbc0
commit e9653e8b76
9 changed files with 68 additions and 80 deletions

View file

@@ -45,6 +45,10 @@ dependencies {
testImplementation("io.kotest:kotest-assertions-core:$kotestVersion")
testImplementation("com.willowtreeapps.assertk:assertk:0.28.0")
testImplementation("io.mockk:mockk:1.13.9")
implementation("ch.qos.logback:logback-core:1.5.6")
implementation("ch.qos.logback:logback-classic:1.5.6")
implementation("org.slf4j:slf4j-api:2.0.13")
}
// Apply a specific Java toolchain to ease working on different environments.

View file

@@ -5,6 +5,7 @@ import de.rpr.githubreleases.publishing.MastodonClientFactory
import de.rpr.githubreleases.publishing.Publishers
import de.rpr.githubreleases.repository.ReleaseRepository
import okhttp3.OkHttpClient
import org.slf4j.LoggerFactory
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
@@ -25,19 +26,21 @@ class App(
private val httpClient: OkHttpClient,
private val publishers: Publishers,
) {
private val log = LoggerFactory.getLogger(this::class.java)
init {
log(config.toString())
log.info(config.toString())
}
fun schedule() {
log("Scheduling app...")
log.info("Scheduling app...")
val executor = Executors.newSingleThreadScheduledExecutor()
executor.scheduleAtFixedRate({ execute() }, 5, config.schedulingDelay, TimeUnit.SECONDS)
}
private fun execute() {
config.accounts.forEach { account ->
log("Processing releases feed for ${account.repo.name}...")
log.info("Processing releases feed for ${account.repo.name}...")
val existingReleases = releaseRepo.getExistingReleases(account.repo)
val feedService = FeedService(account.repo, httpClient)
@@ -46,7 +49,16 @@ class App(
val publishedReleases = publisher.sendReleases(newReleases)
releaseRepo.save(publishedReleases)
log("Finished feed processing...")
log.info("Finished feed processing...")
}
}
}
fun logo(applicationName: String) {
val log = LoggerFactory.getLogger("root")
object {}::class.java.classLoader.getResource("logo.txt")?.readText()!!.split("\n")
.forEach {
log.info(it)
}
log.info("$applicationName starting up")
}

View file

@@ -2,14 +2,17 @@ package de.rpr.githubreleases
import com.google.gson.Gson
import com.google.gson.GsonBuilder
import de.rpr.githubreleases.LogLevel.ERROR
import de.rpr.githubreleases.model.GithubRepo
import org.slf4j.LoggerFactory
import java.io.InputStream
import java.nio.file.Files
class Config(configInputStream: InputStream) {
constructor(configFile: String = "config.json") : this(Files.newInputStream(configFile.toPath()))
private val log = LoggerFactory.getLogger(this::class.java)
data class Account(
private val accountName: String?,
private val github: String?,
@@ -23,21 +26,22 @@ class Config(configInputStream: InputStream) {
val publishingAccessToken get() = mastodonAccessToken!!
fun validate(): Boolean {
val log = LoggerFactory.getLogger(this::class.java)
var valid = true
if (accountName.isNullOrEmpty()) {
ERROR.log("Account should have a name defined.")
log.error("Account should have a name defined.")
valid = false
}
if (github.isNullOrEmpty()) {
ERROR.log("Account should have a github repository defined.")
log.error("Account should have a github repository defined.")
valid = false
}
if (mastodonInstance.isNullOrEmpty()) {
ERROR.log("Account should have a mastodon instance defined.")
log.error("Account should have a mastodon instance defined.")
valid = false
}
if (mastodonAccessToken.isNullOrEmpty()) {
ERROR.log("Account should have a mastodon access token defined.")
log.error("Account should have a mastodon access token defined.")
valid = false
}
return valid
@@ -49,7 +53,9 @@ class Config(configInputStream: InputStream) {
}
@Transient
private val gson: Gson = GsonBuilder().setPrettyPrinting().create()
private val gson: Gson = GsonBuilder()
.setPrettyPrinting()
.create()
val accounts: List<Account>
val schedulingDelay: Long = (System.getenv("SCHEDULING_DELAY")?.toLong() ?: 120) * 60
@@ -61,7 +67,7 @@ class Config(configInputStream: InputStream) {
accounts = configFile.accounts
if (schedulingDelay < 300) {
ERROR.log("To avoid hammering the source webpage, scheduling delay has to be > 5 minutes")
log.error("To avoid hammering the source webpage, scheduling delay has to be > 5 minutes")
valid = false
}

View file

@@ -1,5 +1,6 @@
package de.rpr.githubreleases
import org.slf4j.LoggerFactory
import java.io.File
import java.nio.file.Path
@@ -7,10 +8,4 @@ fun String.toFile(): File = File(this)
fun String.toPath(): Path = this.toFile().toPath()
fun logo(applicationName: String) {
object {}::class.java.classLoader.getResource("logo.txt")?.readText()!!.split("\n")
.forEach {
log(it)
}
log("$applicationName starting up")
}

View file

@@ -1,48 +0,0 @@
package de.rpr.githubreleases
import java.io.PrintStream
import java.time.LocalDateTime
fun log(
message: String,
logLevel: LogLevel = LogLevel.INFO,
) {
logLevel.log(message)
}
enum class LogLevel(val order: Int, val out: PrintStream) {
DEBUG(1, System.out),
INFO(2, System.out),
ERROR(3, System.err),
;
fun log(message: String) {
val activeLogLevel =
System.getenv("LOG_LEVEL")
?.let { LogLevel.valueOf(it.uppercase()) }
?: LogLevel.valueOf("INFO")
if (activeLogLevel.order <= this.order) {
out.println("${LocalDateTime.now().format(dateTimeFormatter)} $this - $message")
}
}
private val timeFormatter =
java.time.format.DateTimeFormatterBuilder()
.appendValue(java.time.temporal.ChronoField.HOUR_OF_DAY, 2)
.appendLiteral(':')
.appendValue(java.time.temporal.ChronoField.MINUTE_OF_HOUR, 2)
.optionalStart()
.appendLiteral(':')
.appendValue(java.time.temporal.ChronoField.SECOND_OF_MINUTE, 2)
.appendLiteral(".")
.appendValue(java.time.temporal.ChronoField.MILLI_OF_SECOND, 3)
.toFormatter()
private val dateTimeFormatter =
java.time.format.DateTimeFormatterBuilder()
.parseCaseInsensitive()
.append(java.time.format.DateTimeFormatter.ISO_LOCAL_DATE)
.appendLiteral('T')
.append(timeFormatter)
.toFormatter()
}

View file

@@ -1,12 +1,12 @@
package de.rpr.githubreleases.feed
import com.ouattararomuald.syndication.Syndication
import de.rpr.githubreleases.log
import de.rpr.githubreleases.model.GithubRepo
import de.rpr.githubreleases.model.Release
import de.rpr.githubreleases.model.Releases
import de.rpr.githubreleases.model.asCollection
import okhttp3.OkHttpClient
import org.slf4j.LoggerFactory
import java.time.OffsetDateTime
import java.time.ZoneId
@@ -14,6 +14,8 @@ class FeedService(
private val githubRepo: GithubRepo,
httpClient: OkHttpClient,
) {
private val log = LoggerFactory.getLogger(this::class.java)
private val syndication: Syndication =
Syndication(
url = "${githubRepo.url}/releases.atom",
@@ -21,7 +23,7 @@ class FeedService(
)
fun getNewReleases(existingReleases: Releases, releasePrefix: String): Releases {
log("Consuming releases feed for ${githubRepo.repoPath}")
log.info("Consuming releases feed for ${githubRepo.repoPath}")
val feedReader = syndication.create(FeedReader::class.java)
return feedReader.readAtom()

View file

@@ -1,9 +1,8 @@
package de.rpr.githubreleases.publishing
import de.rpr.githubreleases.Config
import de.rpr.githubreleases.LogLevel
import de.rpr.githubreleases.log
import de.rpr.githubreleases.model.Releases
import org.slf4j.LoggerFactory
import social.bigbone.MastodonClient
import social.bigbone.api.entity.data.Visibility
import social.bigbone.api.exception.BigBoneRequestException
@@ -13,11 +12,14 @@ class Publisher(
private val client: MastodonClient,
private val dryRun: Boolean = System.getenv("PUBLISH_DRY_RUN").toBoolean(),
) {
private val log = LoggerFactory.getLogger(this::class.java)
fun sendReleases(releases: Releases): Releases {
log("${releases.size} new releases to publish")
log.info("${releases.size} new releases to publish")
val result =
releases
.onEach { release -> log("Publishing release: ${release.title}") }
.onEach { release -> log.info("Publishing release: ${release.title}") }
.mapNotNull { release ->
val request =
client.statuses.postStatus(
@@ -29,13 +31,13 @@ class Publisher(
if (!dryRun) {
request.execute()
} else {
log("Dry-Run, skipping publishing of event...")
log.info("Dry-Run, skipping publishing of event...")
}
return@mapNotNull release
} catch (ex: BigBoneRequestException) {
log("ERROR: Event with id ${release.id} couldn't be published: " + ex.httpStatusCode)
LogLevel.ERROR.log("Cause: ${ex.message}")
LogLevel.ERROR.log("Root cause: ${ex.cause?.message}")
log.info("ERROR: Event with id ${release.id} couldn't be published: " + ex.httpStatusCode)
log.error("Cause: ${ex.message}")
log.error("Root cause: ${ex.cause?.message}")
return@mapNotNull null
}
}

View file

@@ -0,0 +1,15 @@
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1} - %msg%n</pattern>
</encoder>
</appender>
<logger name="de.rpr" level="${LOG_LEVEL:-INFO}"/>
<root level="${LOG_LEVEL_ROOT:-ERROR}">
<appender-ref ref="STDOUT"/>
</root>
</configuration>

View file

@@ -9,13 +9,13 @@ import (
const app = "github-release-bot"
// Builds and exports the application as a docker image tar file.
func build() error {
// Build builds and exports the application as a docker image tar file.
func Build() error {
return sh.RunV("./gradlew", "build", "jibBuildTar")
}
// Deploys the application to a specified host via scp.
// Deploy deploys the application to a specified host via scp.
func Deploy(host string) error {
mg.Deps(build)
mg.Deps(Build)
return sh.RunV("scp", "app/build/"+app+".tar", host+":/opt/stacks/"+app+"/")
}