Commit

Feature/114 use promises (#115)
* chore: 🙈 add sourcemap.json to gitignore

* feat: use promises
lukadev-0 committed Jan 2, 2023
1 parent 5af9cd3 commit 11c1ee9
Showing 3 changed files with 20 additions and 17 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -14,3 +14,6 @@ Packages
 
 # MKDocs
 site/
+
+# Luau LSP sourcemap
+/sourcemap.json
4 changes: 3 additions & 1 deletion src/server/changelogs.lua
@@ -31,7 +31,9 @@ function changelogs:init()
 		end
 
 		RateLimits[player] = workspace.DistributedGameTime + RATELIMIT_DURATION
-		HydrateChangelogs:FireTo(player, githubRaw:GetFileAsync(File))
+		githubRaw:GetFile(File):andThen(function(contents)
+			HydrateChangelogs:FireTo(player, contents)
+		end)
 	end)
 
 	Players.PlayerRemoving:Connect(function(player)
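
With this change, githubRaw:GetFile returns a Promise instead of yielding, so the remote only fires once the fetch resolves. A minimal sketch of how the same call site could also surface failures, assuming the evaera Promise library's :catch (the commit itself leaves rejections unhandled here):

githubRaw:GetFile(File):andThen(function(contents)
	HydrateChangelogs:FireTo(player, contents)
end):catch(function(err)
	-- Hypothetical error handling, not part of this commit.
	warn("Failed to fetch changelogs for " .. player.Name, err)
end)
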
30 changes: 14 additions & 16 deletions src/shared/githubRaw.lua
@@ -1,32 +1,30 @@
-local RECURSION_LIMIT = 5
-local GITHUB_RAW_URL = "https://raw.githubusercontent.com/"
-
 local HttpService = game:GetService("HttpService")
+local ReplicatedStorage = game:GetService("ReplicatedStorage")
 
+local Promise = require(ReplicatedStorage.Packages.Promise)
+
+local GITHUB_RAW_URL = "https://raw.githubusercontent.com/"
+
 local githubRaw = {
-	RefreshList = {}
+	RefreshList = {},
 }
 
-function githubRaw:GetFileAsync(link, _depth: number?)
-	_depth = _depth or 1
-	if _depth > RECURSION_LIMIT then
-		return error("Too much calls on :GetFileAsync() while requesting "..link)
-	end
-	local success, file = pcall(function()
+function githubRaw:GetFile(link: string)
+	return Promise.try(function()
 		return HttpService:GetAsync(GITHUB_RAW_URL .. link)
 	end)
+end
 
-	if not success then
-		return githubRaw:GetFileAsync(link, _depth + 1)
-	end
-
-	return file
+function githubRaw:GetFileWithRetries(link: string, maxRetries: number?)
+	return Promise.retry(githubRaw.GetFile, maxRetries or 5, githubRaw, link)
 end
 
 task.spawn(function()
 	while task.wait(120) do
 		for _, link in githubRaw.RefreshList do
-			githubRaw:GetFileAsync(link)
+			githubRaw:GetFileWithRetries(link):catch(function(err)
+				warn("Failed to fetch " .. link, err)
+			end)
 		end
 	end
 end)
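
The rewritten module exposes two entry points: :GetFile wraps HttpService:GetAsync in Promise.try so HTTP errors become rejections rather than thrown errors, and :GetFileWithRetries re-attempts the fetch through Promise.retry (up to 5 attempts by default), replacing the old hand-rolled recursion guarded by RECURSION_LIMIT. A minimal consumer sketch, assuming the evaera Promise API; the repository path is illustrative only:

-- Hypothetical caller; "owner/repo/main/README.md" is a placeholder path.
githubRaw:GetFileWithRetries("owner/repo/main/README.md", 3)
	:andThen(function(contents)
		print("Fetched", #contents, "characters")
	end)
	:catch(function(err)
		warn("All retries failed:", err)
	end)
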
