Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
65 changes: 59 additions & 6 deletions example/pkgname/userloader_gen.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

65 changes: 59 additions & 6 deletions example/slice/usersliceloader_gen.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

136 changes: 136 additions & 0 deletions example/slice/usersliceloader_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -203,3 +203,139 @@ func TestUserLoader(t *testing.T) {
require.Equal(t, "user 6", users2[0][0].Name)
})
}

// TestUserLoaderWithNullCache exercises UserSliceLoader configured with a
// UserSliceLoaderNullCache: nothing is ever cached, so every Load goes back
// to the fetch function. Keys divisible by 10 simulate a fetch failure.
func TestUserLoaderWithNullCache(t *testing.T) {
	// fetches records each batch of keys passed to fetch; mu guards it
	// because batches are dispatched from the loader's own goroutine.
	var fetches [][]int
	var mu sync.Mutex

	dl := &UserSliceLoader{
		wait:     10 * time.Millisecond,
		maxBatch: 5, // first five concurrent loads form one batch, the rest spill into a second
		fetch: func(keys []int) (users [][]example.User, errors []error) {
			mu.Lock()
			fetches = append(fetches, keys)
			mu.Unlock()

			users = make([][]example.User, len(keys))
			errors = make([]error, len(keys))

			for i, key := range keys {
				if key%10 == 0 { // anything ending in zero is bad
					errors[i] = fmt.Errorf("users not found")
				} else {
					// Each good key resolves to a slice of two users sharing the key's ID.
					users[i] = []example.User{
						{ID: strconv.Itoa(key), Name: "user " + strconv.Itoa(key)},
						{ID: strconv.Itoa(key), Name: "user " + strconv.Itoa(key)},
					}
				}
			}
			return users, errors
		},
		cache: &UserSliceLoaderNullCache{},
	}

	// The parallel subtests below issue six loads (keys 1, 10, 2, 10, 20, 4,
	// 5, 50 across Load/LoadAll/LoadThunk); with maxBatch=5 they coalesce
	// into exactly two batches, verified afterwards.
	t.Run("fetch concurrent data", func(t *testing.T) {
		t.Run("load user successfully", func(t *testing.T) {
			t.Parallel()
			u, err := dl.Load(1)
			require.NoError(t, err)
			require.Equal(t, u[0].ID, "1")
			require.Equal(t, u[1].ID, "1")
		})

		t.Run("load failed user", func(t *testing.T) {
			t.Parallel()
			u, err := dl.Load(10)
			require.Error(t, err)
			require.Nil(t, u)
		})

		t.Run("load many users", func(t *testing.T) {
			t.Parallel()
			u, err := dl.LoadAll([]int{2, 10, 20, 4})
			require.Equal(t, u[0][0].Name, "user 2")
			require.Error(t, err[1])
			require.Error(t, err[2])
			require.Equal(t, u[3][0].Name, "user 4")
		})

		t.Run("load thunk", func(t *testing.T) {
			t.Parallel()
			thunk1 := dl.LoadThunk(5)
			thunk2 := dl.LoadThunk(50)

			u1, err1 := thunk1()
			require.NoError(t, err1)
			require.Equal(t, "user 5", u1[0].Name)

			u2, err2 := thunk2()
			require.Error(t, err2)
			require.Nil(t, u2)
		})
	})

	t.Run("it sent two batches", func(t *testing.T) {
		mu.Lock()
		defer mu.Unlock()

		require.Len(t, fetches, 2)
		assert.Len(t, fetches[0], 5)
		assert.Len(t, fetches[1], 3)
	})

	t.Run("fetch more", func(t *testing.T) {

		// NOTE(review): with the null cache nothing is actually cached, so
		// "previously cached" keys are refetched here — the subtest only
		// asserts the result is still correct, not that fetch was skipped.
		t.Run("previously cached", func(t *testing.T) {
			t.Parallel()
			u, err := dl.Load(1)
			require.NoError(t, err)
			require.Equal(t, u[0].ID, "1")
		})

		t.Run("load many users", func(t *testing.T) {
			t.Parallel()
			u, err := dl.LoadAll([]int{2, 4})
			require.NoError(t, err[0])
			require.NoError(t, err[1])
			require.Equal(t, u[0][0].Name, "user 2")
			require.Equal(t, u[1][0].Name, "user 4")
		})
	})

	t.Run("fetch partial", func(t *testing.T) {
		// Loading a known-bad key again must still error: errors are not
		// cached (and with a null cache nothing else is either).
		t.Run("errors are not cached", func(t *testing.T) {
			t.Parallel()
			u, err := dl.Load(20)
			require.Nil(t, u)
			require.Error(t, err)
		})

		t.Run("load all", func(t *testing.T) {
			t.Parallel()
			u, err := dl.LoadAll([]int{1, 4, 10, 9, 5})
			require.Equal(t, u[0][0].ID, "1")
			require.Equal(t, u[1][0].ID, "4")
			require.Error(t, err[2])
			require.Equal(t, u[3][0].ID, "9")
			require.Equal(t, u[4][0].ID, "5")
		})
	})

	t.Run("load all thunk", func(t *testing.T) {
		thunk1 := dl.LoadAllThunk([]int{5, 6})
		thunk2 := dl.LoadAllThunk([]int{6, 60})

		users1, err1 := thunk1()

		require.NoError(t, err1[0])
		require.NoError(t, err1[1])
		require.Equal(t, "user 5", users1[0][0].Name)
		require.Equal(t, "user 6", users1[1][0].Name)

		users2, err2 := thunk2()

		require.NoError(t, err2[0])
		require.Error(t, err2[1])
		require.Equal(t, "user 6", users2[0][0].Name)
	})
}
Loading