Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
62 changes: 62 additions & 0 deletions api/dbv1/get_playlist_ids_by_permalink.sql.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

28 changes: 20 additions & 8 deletions api/dbv1/get_track_ids_by_permalink.sql.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

27 changes: 27 additions & 0 deletions api/dbv1/get_user_for_handle.sql.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

24 changes: 24 additions & 0 deletions api/dbv1/queries/get_playlist_ids_by_permalink.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
-- name: GetPlaylistIdsByPermalink :many
-- Resolve playlist/album permalinks of the form
--   /<handle>/playlist/<slug>  or  /<handle>/album/<slug>
-- to playlist ids. Handles and permalinks are lowercased up front so
-- they can be matched case-insensitively against users.handle_lc and
-- the permalink rebuilt from the stored route.
WITH lower_handles AS (
SELECT LOWER(h) AS handle
FROM unnest(@handles::text[]) AS h
),
lower_permalinks AS (
SELECT LOWER(p) AS permalink
FROM unnest(@permalinks::text[]) AS p
)
SELECT pr.playlist_id
FROM playlist_routes pr
JOIN users u ON u.user_id = pr.owner_id
JOIN lower_handles lh
ON u.handle_lc = lh.handle
-- NOTE(review): pr.slug is compared case-sensitively here, while the
-- permalink check below lowers it — confirm callers always pass slugs
-- in stored casing, or this pre-filter can drop valid matches.
WHERE pr.slug = ANY(@slugs::text[])
-- in case of conflicts across users
AND (
-- A playlist can be addressed under either the /playlist/ or the
-- /album/ path segment; accept both forms of the permalink.
CONCAT('/', u.handle_lc, '/playlist/', LOWER(pr.slug)) = ANY(
SELECT permalink FROM lower_permalinks
)
OR CONCAT('/', u.handle_lc, '/album/', LOWER(pr.slug)) = ANY(
SELECT permalink FROM lower_permalinks
)
);
25 changes: 18 additions & 7 deletions api/dbv1/queries/get_track_ids_by_permalink.sql
Original file line number Diff line number Diff line change
@@ -1,8 +1,19 @@
-- name: GetTrackIdsByPermalink :many
SELECT track_id
FROM track_routes
JOIN users ON users.user_id = track_routes.owner_id
WHERE handle_lc = ANY(@handles::text[])
AND slug = ANY(@slugs::text[])
AND CONCAT(handle_lc, '/', slug) = ANY(@permalinks::text[]) -- in case of conflicts across users
;
-- Resolve track permalinks of the form /<handle>/<slug> to track ids.
-- Handles and permalinks are lowercased up front so they can be matched
-- case-insensitively against users.handle_lc and the rebuilt route.
WITH lower_handles AS (
SELECT LOWER(h) AS handle
FROM unnest(@handles::text[]) AS h
),
lower_permalinks AS (
SELECT LOWER(p) AS permalink
FROM unnest(@permalinks::text[]) AS p
)
SELECT tr.track_id
FROM track_routes tr
JOIN users u ON u.user_id = tr.owner_id
JOIN lower_handles lh
ON u.handle_lc = lh.handle
-- NOTE(review): tr.slug is compared case-sensitively here while the
-- permalink check below lowers it — confirm callers pass stored casing.
WHERE tr.slug = ANY(@slugs::text[])
-- in case of conflicts across users
AND CONCAT('/', u.handle_lc, '/', LOWER(tr.slug)) = ANY(
SELECT permalink FROM lower_permalinks
);
6 changes: 6 additions & 0 deletions api/dbv1/queries/get_user_for_handle.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
-- name: GetUserForHandle :one
-- Look up a user id by handle, case-insensitively.
-- If multiple rows share a handle_lc, the oldest account wins
-- (ORDER BY created_at ASC); returns no rows when the handle is unknown.
SELECT user_id FROM users
WHERE
handle_lc = lower(@handle)
ORDER BY created_at ASC
LIMIT 1;
24 changes: 24 additions & 0 deletions api/fixture_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -162,6 +162,30 @@ var (
"txhash": "tx123",
}

// trackRouteBaseRow supplies default column values for track_routes
// fixture rows; nil entries are expected to be filled in per-row from
// the CSV fixture (see server_test.go's insertFixtures call).
trackRouteBaseRow = map[string]any{
"slug":         nil,
"title_slug":   nil,
"collision_id": nil,
"owner_id":     nil,
"track_id":     nil,
"is_current":   true,
"blockhash":    "block_abc123",
"blocknumber":  101,
"txhash":       "tx123",
}

// playlistRouteBaseRow supplies default column values for
// playlist_routes fixture rows; nil entries are expected to be filled
// in per-row from the CSV fixture loaded in server_test.go.
playlistRouteBaseRow = map[string]any{
"slug":         nil,
"title_slug":   nil,
"collision_id": nil,
"owner_id":     nil,
"playlist_id":  nil,
"is_current":   true,
"blockhash":    "block_abc123",
"blocknumber":  101,
"txhash":       "tx123",
}

commentBaseRow = map[string]any{
"entity_type": "Track",
"created_at": time.Now(),
Expand Down
4 changes: 4 additions & 0 deletions api/resolve_middleware.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,10 @@ func (app *ApiServer) isFullMiddleware(c *fiber.Ctx) error {
return c.Next()
}

// getIsFull reports whether the request was routed through the "full"
// API variant, as recorded in the request locals by isFullMiddleware.
// The comma-ok assertion makes the helper safe on routes where the
// middleware did not run: a missing or mistyped local yields false
// instead of panicking on a failed type assertion.
func (app *ApiServer) getIsFull(c *fiber.Ctx) bool {
	isFull, _ := c.Locals("isFull").(bool)
	return isFull
}

// will set myId if valid, defaults to 0
func (app *ApiServer) resolveMyIdMiddleware(c *fiber.Ctx) error {
myId, _ := trashid.DecodeHashId(c.Query("user_id"))
Expand Down
14 changes: 9 additions & 5 deletions api/server.go
Original file line number Diff line number Diff line change
Expand Up @@ -250,6 +250,9 @@ func NewApiServer(config config.Config) *ApiServer {
// Rewards
g.Get("/rewards/claim", app.v1ClaimRewards)

// Resolve
g.Get("/resolve", app.v1Resolve)

// Comments
g.Get("/comments/unclaimed_id", app.v1CommentsUnclaimedId)
}
Expand Down Expand Up @@ -316,11 +319,12 @@ func (app *ApiServer) resolveUserHandleToId(handle string) (int32, error) {
if hit, ok := app.resolveHandleCache.Get(handle); ok {
return hit, nil
}
var userId int32
sql := `select user_id from users where handle_lc = lower($1)`
err := app.pool.QueryRow(context.Background(), sql, handle).Scan(&userId)
app.resolveHandleCache.Set(handle, userId)
return userId, err
user_id, err := app.queries.GetUserForHandle(context.Background(), handle)
Copy link
Copy Markdown
Contributor

@stereosteve stereosteve Apr 29, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

we should actually if err != nil {return err} here. My bad on that.

This should return pgx.ErrNoRows if not found I believe.

So returning on error avoids setting no_exist_handle = 0 in the cache... which would be bad if the indexer is behind and it might work on next try.

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yeah great catch

if err != nil {
return 0, err
}
app.resolveHandleCache.Set(handle, int32(user_id))
return int32(user_id), nil
}

func (as *ApiServer) Serve() {
Expand Down
3 changes: 2 additions & 1 deletion api/server_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,8 @@ func TestMain(m *testing.M) {
insertFixtures("associated_wallets", connectedWalletsBaseRow, "testdata/connected_wallets_fixtures.csv")
insertFixtures("aggregate_user_tips", aggregateUserTipsBaseRow, "testdata/aggregate_user_tips_fixtures.csv")
insertFixtures("usdc_purchases", usdcPurchaseBaseRow, "testdata/usdc_purchases_fixtures.csv")
insertFixtures("track_routes", map[string]any{}, "testdata/track_routes_fixtures.csv")
insertFixtures("track_routes", trackRouteBaseRow, "testdata/track_routes_fixtures.csv")
insertFixtures("playlist_routes", playlistRouteBaseRow, "testdata/playlist_routes_fixtures.csv")
insertFixtures("grants", grantBaseRow, "testdata/grants_fixtures.csv")
insertFixtures("comments", commentBaseRow, "testdata/comment_fixtures.csv")
insertFixtures("comment_threads", map[string]any{}, "testdata/comment_thread_fixtures.csv")
Expand Down
2 changes: 2 additions & 0 deletions api/testdata/playlist_fixtures.csv
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,5 @@ playlist_id,playlist_name,playlist_owner_id,is_album,playlist_contents,stream_co
2,Follow Gated Stream,3,t,"{}","{""follow_user_id"": 3}"
3,SecondAlbum,1,t,"{""track_ids"": [{""time"": 1722451644, ""track"": 200, ""metadata_time"": 1722451644},{""time"": 1722451644, ""track"": -1, ""metadata_time"": 1722451644},{""time"": 1722451644, ""track"": 300, ""metadata_time"": 1722451644}]}",
4,Purchase Gated Stream,3,t,"{}","{""usdc_purchase"": {""price"": 135, ""splits"": [{""user_id"": 3, ""percentage"": 100.0}]}}"
500,playlist by permalink,7,f,,
501,album by permalink,8,t,,
3 changes: 3 additions & 0 deletions api/testdata/playlist_routes_fixtures.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
slug,title_slug,collision_id,owner_id,playlist_id
playlist-by-permalink,playlist-by-permalink,0,7,500
album-by-permalink,album-by-permalink,0,8,501
4 changes: 2 additions & 2 deletions api/testdata/track_routes_fixtures.csv
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
"slug","title_slug","collision_id","owner_id","track_id","is_current","blockhash","blocknumber","txhash"
"track-by-permalink","track-by-permalink",0,6,500,TRUE,"0x24f1465e4bd8803b79b2cbcfad695363a640623053563ffd20fdeaf4656a7b89",23200013,"0x76dcea2bf98e56f683b95a071f5c87405a864eea87243920e60cbc7b96ad565b"
slug,title_slug,collision_id,owner_id,track_id
track-by-permalink,track-by-permalink,0,6,500
2 changes: 2 additions & 0 deletions api/testdata/user_fixtures.csv
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ user_id,handle,handle_lc,is_deactivated,wallet,playlist_library
4,accesstester,accesstester,f,0x34567890abcdef12,
5,guyintrending,guyintrending,f,0x34567890abcdef13,
6,TracksByPermalink,tracksbypermalink,f,0xffffffffff,
7,PlaylistsByPermalink,playlistsbypermalink,f,0xffffffffff,
8,AlbumsByPermalink,albumsbypermalink,f,0xffffffffff,
91,badguy,badguy,t,0x4567890abcdef123,
100,authtest1,authtest1,f,0x681c616ae836ceca1effe00bd07f2fdbf9a082bc,
101,authtest2,authtest2,f,0xc451c1f8943b575158310552b41230c61844a1c1,
Expand Down
25 changes: 25 additions & 0 deletions api/v1_playlists.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,31 @@ func (app *ApiServer) v1playlists(c *fiber.Ctx) error {
myId := app.getMyId(c)
ids := decodeIdList(c)

// Add permalink ID mappings
permalinks := queryMutli(c, "permalink")
if len(permalinks) > 0 {
handles := make([]string, len(permalinks))
slugs := make([]string, len(permalinks))
for i, permalink := range permalinks {
if match := playlistURLRegex.FindStringSubmatch(permalink); match != nil {
handles[i] = match[1]
slugs[i] = match[3]
permalinks[i] = permalink
} else {
return fiber.NewError(fiber.StatusBadRequest, "Invalid permalink: "+permalinks[i])
}
}
newIds, err := app.queries.GetPlaylistIdsByPermalink(c.Context(), dbv1.GetPlaylistIdsByPermalinkParams{
Handles: handles,
Slugs: slugs,
Permalinks: permalinks,
})
if err != nil {
return err
}
ids = append(ids, newIds...)
}

playlists, err := app.queries.FullPlaylists(c.Context(), dbv1.GetPlaylistsParams{
MyID: myId,
Ids: ids,
Expand Down
39 changes: 33 additions & 6 deletions api/v1_playlists_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ import (
"testing"

"bridgerton.audius.co/api/dbv1"
"bridgerton.audius.co/trashid"
"github.com/stretchr/testify/assert"
)

Expand All @@ -13,11 +12,39 @@ func TestPlaylistsEndpoint(t *testing.T) {
Data []dbv1.FullPlaylist
}

status, _ := testGet(t, "/v1/full/playlists?id=7eP5n", &resp)
status, body := testGet(t, "/v1/full/playlists?id=7eP5n", &resp)
assert.Equal(t, 200, status)

pl := resp.Data[0]
assert.Equal(t, pl.ID, "7eP5n")
assert.Len(t, pl.Tracks, 2)
assert.Equal(t, trashid.HashId(2), pl.Tracks[0].User.ID)
jsonAssert(t, body, map[string]string{
"data.0.id": "7eP5n",
"data.0.playlist_name": "First",
})
}

// Resolves a playlist by its /<handle>/playlist/<slug> permalink and
// checks that the fixture playlist (id 500 / "eYake") is returned.
func TestPlaylistsEndpointWithPlaylistPermalink(t *testing.T) {
	var parsed struct {
		Data []dbv1.FullPlaylist
	}

	code, raw := testGet(t, "/v1/full/playlists?permalink=/PlaylistsByPermalink/playlist/playlist-by-permalink", &parsed)
	assert.Equal(t, 200, code)

	jsonAssert(t, raw, map[string]string{
		"data.0.id":            "eYake",
		"data.0.playlist_name": "playlist by permalink",
	})
}

// Resolves an album by its /<handle>/album/<slug> permalink and checks
// that the fixture album (id 501 / "ePVXL") is returned.
func TestPlaylistsEndpointWithAlbumPermalink(t *testing.T) {
	var parsed struct {
		Data []dbv1.FullPlaylist
	}

	code, raw := testGet(t, "/v1/full/playlists?permalink=/AlbumsByPermalink/album/album-by-permalink", &parsed)
	assert.Equal(t, 200, code)

	jsonAssert(t, raw, map[string]string{
		"data.0.id":            "ePVXL",
		"data.0.playlist_name": "album by permalink",
	})
}
Loading
Loading