Implement caching for city and state routes

This commit is contained in:
Jan Barfuss 2024-12-20 01:00:29 +01:00
parent 9e27e076a0
commit 92ebea8506
2 changed files with 29 additions and 0 deletions

View File

@@ -1,6 +1,7 @@
package server package server
import ( import (
"github.com/LeRoid-hub/Mensa-API/cache"
"github.com/LeRoid-hub/Mensa-API/fetch" "github.com/LeRoid-hub/Mensa-API/fetch"
"github.com/LeRoid-hub/Mensa-API/scrape" "github.com/LeRoid-hub/Mensa-API/scrape"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
@@ -15,6 +16,18 @@ func city(c *gin.Context) {
return return
} }
if cache.HasCacheData("city/" + city) {
cacheData, err := cache.GetCacheData("city/" + city)
if err != nil {
c.JSON(500, gin.H{
"error": err.Error(),
})
return
}
c.JSON(200, cacheData)
return
}
resp, err := fetch.Fetch(city) resp, err := fetch.Fetch(city)
if err != nil { if err != nil {
c.JSON(500, gin.H{ c.JSON(500, gin.H{
@@ -32,5 +45,7 @@ func city(c *gin.Context) {
} }
scraped := scrape.ScrapeState(resp.Body) scraped := scrape.ScrapeState(resp.Body)
cache.SetCacheData("city/"+city, scraped)
c.JSON(200, scraped) c.JSON(200, scraped)
} }

View File

@@ -1,6 +1,7 @@
package server package server
import ( import (
"github.com/LeRoid-hub/Mensa-API/cache"
"github.com/LeRoid-hub/Mensa-API/fetch" "github.com/LeRoid-hub/Mensa-API/fetch"
"github.com/LeRoid-hub/Mensa-API/scrape" "github.com/LeRoid-hub/Mensa-API/scrape"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
@@ -15,6 +16,18 @@ func state(c *gin.Context) {
return return
} }
if cache.HasCacheData("state/" + state) {
cacheData, err := cache.GetCacheData("state/" + state)
if err != nil {
c.JSON(500, gin.H{
"error": err.Error(),
})
return
}
c.JSON(200, cacheData)
return
}
resp, err := fetch.Fetch(state + ".html") resp, err := fetch.Fetch(state + ".html")
if err != nil { if err != nil {
c.JSON(500, gin.H{ c.JSON(500, gin.H{
@@ -32,6 +45,7 @@ func state(c *gin.Context) {
} }
scraped := scrape.ScrapeState(resp.Body) scraped := scrape.ScrapeState(resp.Body)
cache.SetCacheData("state/"+state, scraped)
c.JSON(200, scraped) c.JSON(200, scraped)
} }