1
0
mirror of https://github.com/IBM/fp-go.git synced 2026-03-26 14:30:35 +02:00

Compare commits

..

5 Commits

Author SHA1 Message Date
renovate[bot]
c754cacf1f fix(deps): update module github.com/urfave/cli/v3 to v3.8.0 (#159)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-03-25 20:40:07 +00:00
Dr. Carsten Leue
d357b32847 fix: add TapThunkK
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-23 18:43:49 +01:00
Dr. Carsten Leue
a3af003e74 fix: undo Pipe and Flow changes, did not have the desired effect
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-20 23:20:07 +01:00
Dr. Carsten Leue
c81235827b fix: try to change the way Pipe and Flow are structured
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-20 12:05:25 +01:00
Dr. Carsten Leue
f35430cf18 fix: introduce async iterators
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-18 10:19:03 +01:00
9 changed files with 1881 additions and 31 deletions

View File

@@ -204,6 +204,102 @@ func ChainFirst[C, A, B any](f Kleisli[C, A, B]) Operator[C, A, A] {
return readerreaderioresult.ChainFirst(f)
}
// ChainFirstThunkK sequences an effect with a Thunk-producing function purely
// for its side effects: the Thunk is executed, its result of type B is thrown
// away, and the original value of type A keeps flowing through the pipeline.
// Typical uses are logging, metrics, or other IO that must not alter the value.
//
// # Type Parameters
//
//   - C: the context type required by the effect
//   - A: the value type, preserved unchanged
//   - B: the Thunk's result type, discarded
//
// # Parameters
//
//   - f: maps the current value of type A to a Thunk[B] run only for its effects
//
// # Returns
//
//   - Operator[C, A, A]: executes the Thunk, then yields the untouched input value
//
// # Example
//
//	logToFile := func(n int) readerioresult.ReaderIOResult[any] {
//		return func(ctx context.Context) io.IO[result.Result[any]] {
//			return func() result.Result[any] {
//				// Perform IO operation that doesn't need effect context
//				fmt.Printf("Logging: %d\n", n)
//				return result.Of[any](nil)
//			}
//		}
//	}
//
//	eff := effect.Of[MyContext](42)
//	logged := effect.ChainFirstThunkK[MyContext](logToFile)(eff)
//	// Prints "Logging: 42" but still produces 42
//
// # See Also
//
//   - ChainThunkK: chains with a Thunk and keeps its result
//   - TapThunkK: alias for ChainFirstThunkK
//   - ChainFirstIOK: the IO-based analogue
//
//go:inline
func ChainFirstThunkK[C, A, B any](f thunk.Kleisli[A, B]) Operator[C, A, A] {
	// Lift the Thunk into the effect context, then discard its result
	// while preserving the incoming value via ChainFirst.
	lift := FromThunk[C, B]
	chain := ChainFirst[C, A, B]
	return fromreader.ChainFirstReaderK(chain, lift, f)
}
// TapThunkK is an alias for ChainFirstThunkK: it runs a Thunk-producing
// function for its side effects while letting the original value pass through
// unchanged. Handy for logging, debugging, or IO operations that do not
// require the effect's context.
//
// # Type Parameters
//
//   - C: the context type required by the effect
//   - A: the value type, preserved unchanged
//   - B: the Thunk's result type, discarded
//
// # Parameters
//
//   - f: maps the current value of type A to a Thunk[B] run only for its effects
//
// # Returns
//
//   - Operator[C, A, A]: executes the Thunk, then yields the untouched input value
//
// # Example
//
//	performSideEffect := func(n int) readerioresult.ReaderIOResult[any] {
//		return func(ctx context.Context) io.IO[result.Result[any]] {
//			return func() result.Result[any] {
//				// Perform context-independent IO operation
//				log.Printf("Processing value: %d", n)
//				return result.Of[any](nil)
//			}
//		}
//	}
//
//	eff := effect.Of[MyContext](42)
//	tapped := effect.TapThunkK[MyContext](performSideEffect)(eff)
//	// Logs "Processing value: 42" but still produces 42
//
// # See Also
//
//   - ChainFirstThunkK: the underlying implementation
//   - TapIOK: the IO-based analogue
//   - Tap: the variant for full effects
//
//go:inline
func TapThunkK[C, A, B any](f thunk.Kleisli[A, B]) Operator[C, A, A] {
	// Pure delegation: TapThunkK and ChainFirstThunkK are the same operation.
	return ChainFirstThunkK[C, A, B](f)
}
// ChainIOK chains an effect with a function that returns an IO action.
// This is useful for integrating IO-based computations (synchronous side effects)
// into effect chains. The IO action is automatically lifted into the Effect context.

View File

@@ -678,6 +678,587 @@ func TestChainThunkK_Integration(t *testing.T) {
})
}
// TestChainFirstThunkK_Success verifies that ChainFirstThunkK executes the
// supplied thunk for its side effects while the pipeline value is preserved.
func TestChainFirstThunkK_Success(t *testing.T) {
	t.Run("executes thunk but preserves original value", func(t *testing.T) {
		executed := false
		markExecuted := func(_ int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					executed = true
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe1(
			Of[TestConfig](42),
			ChainFirstThunkK[TestConfig](markExecuted),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of(42), res)
		assert.True(t, executed)
	})
	t.Run("chains multiple side effects", func(t *testing.T) {
		var entries []string
		record := func(n int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					entries = append(entries, fmt.Sprintf("log: %d", n))
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe2(
			Of[TestConfig](10),
			ChainFirstThunkK[TestConfig](record),
			ChainFirstThunkK[TestConfig](record),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of(10), res)
		// The same value flows through both taps, so both entries match.
		assert.Equal(t, 2, len(entries))
		assert.Equal(t, "log: 10", entries[0])
		assert.Equal(t, "log: 10", entries[1])
	})
	t.Run("side effect can access runtime context", func(t *testing.T) {
		var seenCtx context.Context
		grabContext := func(_ int) readerioresult.ReaderIOResult[any] {
			return func(ctx context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					seenCtx = ctx
					return result.Of[any](nil)
				}
			}
		}
		runCtx := context.Background()
		pipeline := F.Pipe1(
			Of[TestConfig](42),
			ChainFirstThunkK[TestConfig](grabContext),
		)
		res := pipeline(testConfig)(runCtx)()
		assert.Equal(t, result.Of(42), res)
		assert.Equal(t, runCtx, seenCtx)
	})
	t.Run("side effect result is discarded", func(t *testing.T) {
		yieldOther := func(_ int) readerioresult.ReaderIOResult[string] {
			return func(_ context.Context) io.IO[result.Result[string]] {
				return func() result.Result[string] {
					return result.Of("different value")
				}
			}
		}
		pipeline := F.Pipe1(
			Of[TestConfig](42),
			ChainFirstThunkK[TestConfig](yieldOther),
		)
		res := pipeline(testConfig)(context.Background())()
		// The thunk's string result never replaces the original int.
		assert.Equal(t, result.Of(42), res)
	})
}
// TestChainFirstThunkK_Failure verifies error propagation: failures upstream
// skip the thunk, and failures inside the thunk fail the whole computation.
func TestChainFirstThunkK_Failure(t *testing.T) {
	t.Run("propagates error from previous effect", func(t *testing.T) {
		upstreamErr := fmt.Errorf("previous error")
		executed := false
		markExecuted := func(_ int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					executed = true
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe1(
			Fail[TestConfig, int](upstreamErr),
			ChainFirstThunkK[TestConfig](markExecuted),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Left[int](upstreamErr), res)
		// The thunk must never run when the chain is already failed.
		assert.False(t, executed)
	})
	t.Run("propagates error from thunk side effect", func(t *testing.T) {
		thunkErr := fmt.Errorf("side effect error")
		failing := func(_ int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					return result.Left[any](thunkErr)
				}
			}
		}
		pipeline := F.Pipe1(
			Of[TestConfig](42),
			ChainFirstThunkK[TestConfig](failing),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Left[int](thunkErr), res)
	})
	t.Run("stops execution on first error", func(t *testing.T) {
		firstErr := fmt.Errorf("first error")
		secondRan := false
		failFirst := func(_ int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					return result.Left[any](firstErr)
				}
			}
		}
		recordSecond := func(_ int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					secondRan = true
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe2(
			Of[TestConfig](42),
			ChainFirstThunkK[TestConfig](failFirst),
			ChainFirstThunkK[TestConfig](recordSecond),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Left[int](firstErr), res)
		assert.False(t, secondRan)
	})
}
// TestChainFirstThunkK_EdgeCases exercises boundary values: zero, empty
// string, and nil pointers all pass through the tap unchanged.
func TestChainFirstThunkK_EdgeCases(t *testing.T) {
	t.Run("handles zero value", func(t *testing.T) {
		invocations := 0
		countInvocations := func(_ int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					invocations++
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe1(
			Of[TestConfig](0),
			ChainFirstThunkK[TestConfig](countInvocations),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of(0), res)
		assert.Equal(t, 1, invocations)
	})
	t.Run("handles empty string", func(t *testing.T) {
		var observed string
		observe := func(s string) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					observed = s
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe1(
			Of[TestConfig](""),
			ChainFirstThunkK[TestConfig](observe),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of(""), res)
		assert.Equal(t, "", observed)
	})
	t.Run("handles nil pointer", func(t *testing.T) {
		var observed *int
		observe := func(p *int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					observed = p
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe1(
			Of[TestConfig]((*int)(nil)),
			ChainFirstThunkK[TestConfig](observe),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of((*int)(nil)), res)
		assert.Nil(t, observed)
	})
}
// TestChainFirstThunkK_Integration verifies that the tap composes with Map,
// ChainThunkK, ChainReaderK and ChainReaderIOK without altering the values
// flowing between stages.
func TestChainFirstThunkK_Integration(t *testing.T) {
	t.Run("composes with Map and Chain", func(t *testing.T) {
		var trace []string
		record := func(n int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					trace = append(trace, fmt.Sprintf("value: %d", n))
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe3(
			Of[TestConfig](5),
			Map[TestConfig](func(x int) int { return x * 2 }),
			ChainFirstThunkK[TestConfig](record),
			Map[TestConfig](func(x int) int { return x + 3 }),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of(13), res) // (5 * 2) + 3
		// The tap observed the intermediate value between the two Maps.
		assert.Equal(t, 1, len(trace))
		assert.Equal(t, "value: 10", trace[0])
	})
	t.Run("composes with ChainThunkK", func(t *testing.T) {
		var trace []string
		recordSideEffect := func(n int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					trace = append(trace, fmt.Sprintf("side-effect: %d", n))
					return result.Of[any](nil)
				}
			}
		}
		toMessage := func(n int) readerioresult.ReaderIOResult[string] {
			return func(_ context.Context) io.IO[result.Result[string]] {
				return func() result.Result[string] {
					trace = append(trace, fmt.Sprintf("transform: %d", n))
					return result.Of(fmt.Sprintf("Result: %d", n))
				}
			}
		}
		pipeline := F.Pipe2(
			Of[TestConfig](42),
			ChainFirstThunkK[TestConfig](recordSideEffect),
			ChainThunkK[TestConfig](toMessage),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of("Result: 42"), res)
		assert.Equal(t, 2, len(trace))
		assert.Equal(t, "side-effect: 42", trace[0])
		assert.Equal(t, "transform: 42", trace[1])
	})
	t.Run("composes with ChainReaderK and ChainReaderIOK", func(t *testing.T) {
		var trace []string
		plusMultiplier := func(n int) reader.Reader[TestConfig, int] {
			return func(cfg TestConfig) int {
				return n + cfg.Multiplier
			}
		}
		doubleAndRecord := func(n int) readerio.ReaderIO[TestConfig, int] {
			return func(_ TestConfig) io.IO[int] {
				return func() int {
					trace = append(trace, fmt.Sprintf("reader-io: %d", n))
					return n * 2
				}
			}
		}
		recordThunk := func(n int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					trace = append(trace, fmt.Sprintf("thunk: %d", n))
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe3(
			Of[TestConfig](5),
			ChainReaderK(plusMultiplier),
			ChainReaderIOK(doubleAndRecord),
			ChainFirstThunkK[TestConfig](recordThunk),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of(16), res) // (5 + 3) * 2
		assert.Equal(t, 2, len(trace))
		assert.Equal(t, "reader-io: 8", trace[0])
		assert.Equal(t, "thunk: 16", trace[1])
	})
}
// TestTapThunkK_Success verifies TapThunkK behaves exactly like
// ChainFirstThunkK: side effects run, values are untouched.
func TestTapThunkK_Success(t *testing.T) {
	t.Run("is alias for ChainFirstThunkK", func(t *testing.T) {
		var trace []string
		record := func(n int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					trace = append(trace, fmt.Sprintf("tapped: %d", n))
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe1(
			Of[TestConfig](42),
			TapThunkK[TestConfig](record),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of(42), res)
		assert.Equal(t, 1, len(trace))
		assert.Equal(t, "tapped: 42", trace[0])
	})
	t.Run("useful for logging without changing value", func(t *testing.T) {
		var trace []string
		// stepLogger builds a tap that records a named stage plus the value.
		stepLogger := func(stage string) func(int) readerioresult.ReaderIOResult[any] {
			return func(n int) readerioresult.ReaderIOResult[any] {
				return func(_ context.Context) io.IO[result.Result[any]] {
					return func() result.Result[any] {
						trace = append(trace, fmt.Sprintf("%s: %d", stage, n))
						return result.Of[any](nil)
					}
				}
			}
		}
		pipeline := F.Pipe4(
			Of[TestConfig](10),
			TapThunkK[TestConfig](stepLogger("start")),
			Map[TestConfig](func(x int) int { return x * 2 }),
			TapThunkK[TestConfig](stepLogger("after-map")),
			Map[TestConfig](func(x int) int { return x + 5 }),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of(25), res) // (10 * 2) + 5
		assert.Equal(t, 2, len(trace))
		assert.Equal(t, "start: 10", trace[0])
		assert.Equal(t, "after-map: 20", trace[1])
	})
	t.Run("can perform IO operations", func(t *testing.T) {
		ioRan := false
		doIO := func(_ int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					// Simulate IO operation
					ioRan = true
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe1(
			Of[TestConfig](42),
			TapThunkK[TestConfig](doIO),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of(42), res)
		assert.True(t, ioRan)
	})
}
// TestTapThunkK_Failure verifies error handling: upstream failures skip the
// tap, and errors produced by the tap fail the whole pipeline.
func TestTapThunkK_Failure(t *testing.T) {
	t.Run("propagates error from previous effect", func(t *testing.T) {
		upstreamErr := fmt.Errorf("previous error")
		tapRan := false
		recordTap := func(_ int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					tapRan = true
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe1(
			Fail[TestConfig, int](upstreamErr),
			TapThunkK[TestConfig](recordTap),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Left[int](upstreamErr), res)
		assert.False(t, tapRan)
	})
	t.Run("propagates error from tap operation", func(t *testing.T) {
		tapErr := fmt.Errorf("tap error")
		failTap := func(_ int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					return result.Left[any](tapErr)
				}
			}
		}
		pipeline := F.Pipe1(
			Of[TestConfig](42),
			TapThunkK[TestConfig](failTap),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Left[int](tapErr), res)
	})
}
// TestTapThunkK_EdgeCases verifies that sequential taps run in pipeline order.
func TestTapThunkK_EdgeCases(t *testing.T) {
	t.Run("handles multiple taps in sequence", func(t *testing.T) {
		var trace []string
		// mkTap builds a tap that records the given label when executed.
		mkTap := func(label string) func(int) readerioresult.ReaderIOResult[any] {
			return func(_ int) readerioresult.ReaderIOResult[any] {
				return func(_ context.Context) io.IO[result.Result[any]] {
					return func() result.Result[any] {
						trace = append(trace, label)
						return result.Of[any](nil)
					}
				}
			}
		}
		pipeline := F.Pipe3(
			Of[TestConfig](42),
			TapThunkK[TestConfig](mkTap("tap1")),
			TapThunkK[TestConfig](mkTap("tap2")),
			TapThunkK[TestConfig](mkTap("tap3")),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of(42), res)
		// Taps execute strictly in the order they were chained.
		assert.Equal(t, []string{"tap1", "tap2", "tap3"}, trace)
	})
}
// TestTapThunkK_Integration covers end-to-end scenarios: a multi-stage
// logging pipeline and composition with FromThunk.
func TestTapThunkK_Integration(t *testing.T) {
	t.Run("real-world logging scenario", func(t *testing.T) {
		var trace []string
		recordStart := func(n int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					trace = append(trace, fmt.Sprintf("Starting computation with: %d", n))
					return result.Of[any](nil)
				}
			}
		}
		recordMid := func(n int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					trace = append(trace, fmt.Sprintf("Intermediate result: %d", n))
					return result.Of[any](nil)
				}
			}
		}
		recordEnd := func(s string) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					trace = append(trace, fmt.Sprintf("Final result: %s", s))
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe5(
			Of[TestConfig](10),
			TapThunkK[TestConfig](recordStart),
			Map[TestConfig](func(x int) int { return x * 3 }),
			TapThunkK[TestConfig](recordMid),
			Map[TestConfig](func(x int) string { return fmt.Sprintf("Value: %d", x) }),
			TapThunkK[TestConfig](recordEnd),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of("Value: 30"), res)
		assert.Equal(t, 3, len(trace))
		assert.Equal(t, "Starting computation with: 10", trace[0])
		assert.Equal(t, "Intermediate result: 30", trace[1])
		assert.Equal(t, "Final result: Value: 30", trace[2])
	})
	t.Run("composes with FromThunk", func(t *testing.T) {
		var trace []string
		produce := func(_ context.Context) io.IO[result.Result[int]] {
			return func() result.Result[int] {
				return result.Of(100)
			}
		}
		record := func(n int) readerioresult.ReaderIOResult[any] {
			return func(_ context.Context) io.IO[result.Result[any]] {
				return func() result.Result[any] {
					trace = append(trace, fmt.Sprintf("value: %d", n))
					return result.Of[any](nil)
				}
			}
		}
		pipeline := F.Pipe1(
			FromThunk[TestConfig](produce),
			TapThunkK[TestConfig](record),
		)
		res := pipeline(testConfig)(context.Background())()
		assert.Equal(t, result.Of(100), res)
		assert.Equal(t, 1, len(trace))
		assert.Equal(t, "value: 100", trace[0])
	})
}
func TestAsks_Success(t *testing.T) {
t.Run("extracts a field from context", func(t *testing.T) {
type Config struct {
@@ -685,7 +1266,7 @@ func TestAsks_Success(t *testing.T) {
Port int
}
getHost := Asks[Config](func(cfg Config) string {
getHost := Asks(func(cfg Config) string {
return cfg.Host
})
@@ -701,7 +1282,7 @@ func TestAsks_Success(t *testing.T) {
Port int
}
getURL := Asks[Config](func(cfg Config) string {
getURL := Asks(func(cfg Config) string {
return fmt.Sprintf("http://%s:%d", cfg.Host, cfg.Port)
})
@@ -712,7 +1293,7 @@ func TestAsks_Success(t *testing.T) {
})
t.Run("extracts numeric field", func(t *testing.T) {
getPort := Asks[TestConfig](func(cfg TestConfig) int {
getPort := Asks(func(cfg TestConfig) int {
return cfg.Multiplier
})
@@ -728,7 +1309,7 @@ func TestAsks_Success(t *testing.T) {
Height int
}
getArea := Asks[Config](func(cfg Config) int {
getArea := Asks(func(cfg Config) int {
return cfg.Width * cfg.Height
})
@@ -739,7 +1320,7 @@ func TestAsks_Success(t *testing.T) {
})
t.Run("transforms string field", func(t *testing.T) {
getUpperPrefix := Asks[TestConfig](func(cfg TestConfig) string {
getUpperPrefix := Asks(func(cfg TestConfig) string {
return fmt.Sprintf("[%s]", cfg.Prefix)
})
@@ -756,7 +1337,7 @@ func TestAsks_EdgeCases(t *testing.T) {
Value int
}
getValue := Asks[Config](func(cfg Config) int {
getValue := Asks(func(cfg Config) int {
return cfg.Value
})
@@ -771,7 +1352,7 @@ func TestAsks_EdgeCases(t *testing.T) {
Name string
}
getName := Asks[Config](func(cfg Config) string {
getName := Asks(func(cfg Config) string {
return cfg.Name
})
@@ -786,7 +1367,7 @@ func TestAsks_EdgeCases(t *testing.T) {
Data *string
}
hasData := Asks[Config](func(cfg Config) bool {
hasData := Asks(func(cfg Config) bool {
return cfg.Data != nil
})
@@ -805,7 +1386,7 @@ func TestAsks_EdgeCases(t *testing.T) {
DB Database
}
getDBHost := Asks[Config](func(cfg Config) string {
getDBHost := Asks(func(cfg Config) string {
return cfg.DB.Host
})
@@ -825,7 +1406,7 @@ func TestAsks_Integration(t *testing.T) {
}
computation := F.Pipe1(
Asks[Config](func(cfg Config) int {
Asks(func(cfg Config) int {
return cfg.Value
}),
Map[Config](func(x int) int { return x * 2 }),
@@ -843,7 +1424,7 @@ func TestAsks_Integration(t *testing.T) {
}
computation := F.Pipe1(
Asks[Config](func(cfg Config) int {
Asks(func(cfg Config) int {
return cfg.Multiplier
}),
Chain(func(mult int) Effect[Config, int] {
@@ -859,7 +1440,7 @@ func TestAsks_Integration(t *testing.T) {
t.Run("composes with ChainReaderK", func(t *testing.T) {
computation := F.Pipe1(
Asks[TestConfig](func(cfg TestConfig) int {
Asks(func(cfg TestConfig) int {
return cfg.Multiplier
}),
ChainReaderK(func(mult int) reader.Reader[TestConfig, int] {
@@ -879,7 +1460,7 @@ func TestAsks_Integration(t *testing.T) {
log := []string{}
computation := F.Pipe1(
Asks[TestConfig](func(cfg TestConfig) string {
Asks(func(cfg TestConfig) string {
return cfg.Prefix
}),
ChainReaderIOK(func(prefix string) readerio.ReaderIO[TestConfig, string] {
@@ -906,11 +1487,11 @@ func TestAsks_Integration(t *testing.T) {
}
computation := F.Pipe2(
Asks[Config](func(cfg Config) string {
Asks(func(cfg Config) string {
return cfg.First
}),
Chain(func(_ string) Effect[Config, string] {
return Asks[Config](func(cfg Config) string {
return Asks(func(cfg Config) string {
return cfg.Second
})
}),
@@ -933,7 +1514,7 @@ func TestAsks_Integration(t *testing.T) {
computation := F.Pipe1(
Ask[Config](),
Chain(func(cfg Config) Effect[Config, int] {
return Asks[Config](func(c Config) int {
return Asks(func(c Config) int {
return c.Value * 2
})
}),
@@ -953,7 +1534,7 @@ func TestAsks_Comparison(t *testing.T) {
}
// Using Asks
asksVersion := Asks[Config](func(cfg Config) int {
asksVersion := Asks(func(cfg Config) int {
return cfg.Port
})
@@ -983,7 +1564,7 @@ func TestAsks_Comparison(t *testing.T) {
}
// Asks is more direct for field extraction
getHost := Asks[Config](func(cfg Config) string {
getHost := Asks(func(cfg Config) string {
return cfg.Host
})
@@ -1003,7 +1584,7 @@ func TestAsks_RealWorldScenarios(t *testing.T) {
User string
}
getConnectionString := Asks[DatabaseConfig](func(cfg DatabaseConfig) string {
getConnectionString := Asks(func(cfg DatabaseConfig) string {
return fmt.Sprintf("postgres://%s@%s:%d/%s",
cfg.User, cfg.Host, cfg.Port, cfg.Database)
})
@@ -1027,7 +1608,7 @@ func TestAsks_RealWorldScenarios(t *testing.T) {
BasePath string
}
getEndpoint := Asks[APIConfig](func(cfg APIConfig) string {
getEndpoint := Asks(func(cfg APIConfig) string {
return fmt.Sprintf("%s://%s:%d%s",
cfg.Protocol, cfg.Host, cfg.Port, cfg.BasePath)
})
@@ -1049,7 +1630,7 @@ func TestAsks_RealWorldScenarios(t *testing.T) {
MaxRetries int
}
isValid := Asks[Config](func(cfg Config) bool {
isValid := Asks(func(cfg Config) bool {
return cfg.Timeout > 0 && cfg.MaxRetries >= 0
})

View File

@@ -650,4 +650,4 @@ func TestFilter_Integration(t *testing.T) {
})
}
// Made with Bob

View File

@@ -4,7 +4,7 @@ go 1.24
require (
github.com/stretchr/testify v1.11.1
github.com/urfave/cli/v3 v3.7.0
github.com/urfave/cli/v3 v3.8.0
)
require (

View File

@@ -8,6 +8,8 @@ github.com/urfave/cli/v3 v3.6.2 h1:lQuqiPrZ1cIz8hz+HcrG0TNZFxU70dPZ3Yl+pSrH9A8=
github.com/urfave/cli/v3 v3.6.2/go.mod h1:ysVLtOEmg2tOy6PknnYVhDoouyC/6N42TMeoMzskhso=
github.com/urfave/cli/v3 v3.7.0 h1:AGSnbUyjtLiM+WJUb4dzXKldl/gL+F8OwmRDtVr6g2U=
github.com/urfave/cli/v3 v3.7.0/go.mod h1:ysVLtOEmg2tOy6PknnYVhDoouyC/6N42TMeoMzskhso=
github.com/urfave/cli/v3 v3.8.0 h1:XqKPrm0q4P0q5JpoclYoCAv0/MIvH/jZ2umzuf8pNTI=
github.com/urfave/cli/v3 v3.8.0/go.mod h1:ysVLtOEmg2tOy6PknnYVhDoouyC/6N42TMeoMzskhso=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=

195
v2/iterator/iter/async.go Normal file
View File

@@ -0,0 +1,195 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package iter
import (
N "github.com/IBM/fp-go/v2/number"
)
// Async turns a synchronous sequence into an asynchronously produced one.
// A dedicated goroutine drains the input sequence and forwards each element
// over a buffered channel, so production and consumption run concurrently.
//
// # Type Parameters
//
//   - T: the element type of the sequence
//
// # Parameters
//
//   - input: the source sequence consumed by the producer goroutine
//   - bufSize: channel capacity; negative values are clamped to 0 (unbuffered).
//     A larger buffer lets the producer run further ahead at the cost of
//     memory, while 0 forces strict producer/consumer synchronization.
//
// # Returns
//
//   - Seq[T]: a sequence yielding the input's elements asynchronously
//
// # Behavior
//
//   - A goroutine consumes the input and sends elements over the channel
//   - If the consumer stops early (yield returns false), the goroutine is
//     signaled through a done channel and exits cleanly
//   - The data channel is closed when the input is exhausted or the
//     consumer terminates, so neither side leaks
//
// # Example Usage
//
//	// Create an async sequence with a buffer of 10
//	seq := From(1, 2, 3, 4, 5)
//	async := Async(seq, 10)
//
//	// Elements are produced concurrently
//	for v := range async {
//		fmt.Println(v) // Prints: 1, 2, 3, 4, 5
//	}
//
// # Example with Early Termination
//
//	seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
//	async := Async(seq, 5)
//
//	// Stop after 3 elements - producer goroutine will be properly cleaned up
//	count := 0
//	for v := range async {
//		fmt.Println(v)
//		count++
//		if count >= 3 {
//			break
//		}
//	}
//
// # See Also
//
//   - From: Creates a sequence from values
//   - Map: Transforms sequence elements
//   - Filter: Filters sequence elements
func Async[T any](input Seq[T], bufSize int) Seq[T] {
	return func(yield func(T) bool) {
		// Clamp negative buffer sizes to an unbuffered channel.
		out := make(chan T, N.Max(bufSize, 0))
		// Closed by the consumer (via defer) to tell the producer to stop.
		stop := make(chan Void)
		go func() {
			// Closing the data channel ends the consumer's range loop,
			// whether the input ran dry or we were told to stop.
			defer close(out)
			for item := range input {
				select {
				case out <- item:
				case <-stop:
					// Consumer is gone; abandon the remaining input.
					return
				}
			}
		}()
		// Runs on every exit path, including early termination below,
		// guaranteeing the producer goroutine is released.
		defer close(stop)
		for item := range out {
			if !yield(item) {
				return
			}
		}
	}
}
// Async2 is the key-value (Seq2) counterpart of Async: a goroutine drains the
// input sequence and forwards pairs over a buffered channel so that
// production and consumption overlap. Internally the Seq2 is converted to a
// sequence of Pairs, passed through Async, and converted back.
//
// # Type Parameters
//
//   - K: the key type of the sequence
//   - V: the value type of the sequence
//
// # Parameters
//
//   - input: the source key-value sequence consumed asynchronously
//   - bufSize: channel capacity; negative values are clamped to 0 (unbuffered).
//     A larger buffer lets the producer run further ahead at the cost of
//     memory, while 0 forces strict producer/consumer synchronization.
//
// # Returns
//
//   - Seq2[K, V]: a key-value sequence yielding the input's pairs asynchronously
//
// # Behavior
//
//   - Inherits all of Async's guarantees: ordered delivery, clean shutdown of
//     the producer goroutine on early termination, and proper channel closure
//
// # Example Usage
//
//	// Create an async key-value sequence with a buffer of 10
//	seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
//	async := Async2(seq, 10)
//
//	// Elements are produced concurrently
//	for k, v := range async {
//		fmt.Printf("%d: %s\n", k, v)
//	}
//	// Output:
//	// 1: a
//	// 2: b
//	// 3: c
//
// # See Also
//
//   - Async: Asynchronous sequence for single-value sequences
//   - ToSeqPair: Converts Seq2 to Seq of Pairs
//   - FromSeqPair: Converts Seq of Pairs to Seq2
//   - MonadZip: Creates key-value sequences from two sequences
func Async2[K, V any](input Seq2[K, V], bufSize int) Seq2[K, V] {
	// Route the pairs through the single-value Async implementation.
	pairs := ToSeqPair(input)
	return FromSeqPair(Async(pairs, bufSize))
}

View File

@@ -0,0 +1,905 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package iter
import (
"fmt"
"sync/atomic"
"testing"
"time"
N "github.com/IBM/fp-go/v2/number"
"github.com/IBM/fp-go/v2/pair"
"github.com/stretchr/testify/assert"
)
// TestAsync_Success tests basic Async functionality: element preservation
// and ordering for small, single-element, and large sequences.
func TestAsync_Success(t *testing.T) {
	t.Run("converts sequence to async with buffer", func(t *testing.T) {
		seq := From(1, 2, 3, 4, 5)
		async := Async(seq, 10)
		result := toSlice(async)
		assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
	})
	t.Run("preserves element order", func(t *testing.T) {
		seq := From("a", "b", "c", "d", "e")
		async := Async(seq, 5)
		result := toSlice(async)
		assert.Equal(t, []string{"a", "b", "c", "d", "e"}, result)
	})
	t.Run("works with single element", func(t *testing.T) {
		seq := From(42)
		async := Async(seq, 1)
		result := toSlice(async)
		assert.Equal(t, []int{42}, result)
	})
	t.Run("works with large sequence", func(t *testing.T) {
		// 100 elements exercises repeated channel sends with a small buffer.
		data := make([]int, 100)
		for i := range data {
			data[i] = i
		}
		seq := From(data...)
		async := Async(seq, 20)
		result := toSlice(async)
		assert.Equal(t, data, result)
	})
}
// TestAsync_BufferSizes tests different buffer sizes, including the
// degenerate cases (unbuffered, negative, larger than the sequence).
func TestAsync_BufferSizes(t *testing.T) {
	t.Run("unbuffered channel (bufSize 0)", func(t *testing.T) {
		seq := From(1, 2, 3)
		async := Async(seq, 0)
		result := toSlice(async)
		assert.Equal(t, []int{1, 2, 3}, result)
	})
	t.Run("small buffer", func(t *testing.T) {
		seq := From(1, 2, 3, 4, 5)
		async := Async(seq, 2)
		result := toSlice(async)
		assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
	})
	t.Run("large buffer", func(t *testing.T) {
		seq := From(1, 2, 3, 4, 5)
		async := Async(seq, 100)
		result := toSlice(async)
		assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
	})
	t.Run("negative buffer size treated as 0", func(t *testing.T) {
		// A negative size must not panic; Async is expected to clamp it.
		seq := From(1, 2, 3)
		async := Async(seq, -5)
		result := toSlice(async)
		assert.Equal(t, []int{1, 2, 3}, result)
	})
	t.Run("buffer size equals sequence length", func(t *testing.T) {
		seq := From(1, 2, 3, 4, 5)
		async := Async(seq, 5)
		result := toSlice(async)
		assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
	})
	t.Run("buffer size larger than sequence", func(t *testing.T) {
		seq := From(1, 2, 3)
		async := Async(seq, 10)
		result := toSlice(async)
		assert.Equal(t, []int{1, 2, 3}, result)
	})
}
// TestAsync_Empty tests Async with empty sequences for several element
// types and buffer sizes; the resulting sequence must also be empty.
func TestAsync_Empty(t *testing.T) {
	t.Run("empty integer sequence", func(t *testing.T) {
		seq := Empty[int]()
		async := Async(seq, 5)
		result := toSlice(async)
		assert.Empty(t, result)
	})
	t.Run("empty string sequence", func(t *testing.T) {
		seq := Empty[string]()
		async := Async(seq, 10)
		result := toSlice(async)
		assert.Empty(t, result)
	})
	t.Run("empty with zero buffer", func(t *testing.T) {
		seq := Empty[int]()
		async := Async(seq, 0)
		result := toSlice(async)
		assert.Empty(t, result)
	})
}
// TestAsync_EarlyTermination tests that Async properly handles early
// termination: breaking out of the consumer loop must signal the producer
// goroutine to stop rather than leaking it.
func TestAsync_EarlyTermination(t *testing.T) {
	t.Run("stops producer when consumer breaks", func(t *testing.T) {
		var producerCount atomic.Int32
		// Create a sequence that tracks how many elements were produced
		seq := func(yield func(int) bool) {
			for i := range 100 {
				producerCount.Add(1)
				if !yield(i) {
					return
				}
			}
		}
		async := Async(seq, 10)
		// Consume only 5 elements
		count := 0
		for range async {
			count++
			if count >= 5 {
				break
			}
		}
		// Give goroutine time to clean up
		// NOTE(review): sleep-based synchronization is inherently best-effort;
		// the bounds below are deliberately loose to avoid flakiness.
		time.Sleep(10 * time.Millisecond)
		// Producer should have stopped shortly after consumer stopped
		// It may produce a few extra due to buffering, but not all 100
		produced := producerCount.Load()
		assert.LessOrEqual(t, produced, int32(20), "producer should stop after consumer breaks")
		assert.GreaterOrEqual(t, produced, int32(5), "producer should produce at least what was consumed")
	})
	t.Run("handles yield returning false", func(t *testing.T) {
		seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
		async := Async(seq, 5)
		collected := []int{}
		for v := range async {
			collected = append(collected, v)
			if v == 3 {
				break
			}
		}
		assert.Equal(t, []int{1, 2, 3}, collected)
	})
	t.Run("early termination with unbuffered channel", func(t *testing.T) {
		// With no buffer the producer is blocked on send when the consumer
		// breaks, so this exercises the cancellation path directly.
		seq := From(1, 2, 3, 4, 5)
		async := Async(seq, 0)
		collected := []int{}
		for v := range async {
			collected = append(collected, v)
			if v == 2 {
				break
			}
		}
		assert.Equal(t, []int{1, 2}, collected)
	})
}
// TestAsync_WithComplexTypes tests Async with complex data types (structs,
// pointers, slices, maps); reference identity must be preserved.
func TestAsync_WithComplexTypes(t *testing.T) {
	type Person struct {
		Name string
		Age  int
	}
	t.Run("works with structs", func(t *testing.T) {
		seq := From(
			Person{"Alice", 30},
			Person{"Bob", 25},
			Person{"Charlie", 35},
		)
		async := Async(seq, 5)
		result := toSlice(async)
		expected := []Person{
			{"Alice", 30},
			{"Bob", 25},
			{"Charlie", 35},
		}
		assert.Equal(t, expected, result)
	})
	t.Run("works with pointers", func(t *testing.T) {
		p1 := &Person{"Alice", 30}
		p2 := &Person{"Bob", 25}
		p3 := &Person{"Charlie", 35}
		seq := From(p1, p2, p3)
		async := Async(seq, 3)
		result := toSlice(async)
		assert.Equal(t, []*Person{p1, p2, p3}, result)
	})
	t.Run("works with slices", func(t *testing.T) {
		seq := From([]int{1, 2}, []int{3, 4}, []int{5, 6})
		async := Async(seq, 2)
		result := toSlice(async)
		expected := [][]int{{1, 2}, {3, 4}, {5, 6}}
		assert.Equal(t, expected, result)
	})
	t.Run("works with maps", func(t *testing.T) {
		m1 := map[string]int{"a": 1}
		m2 := map[string]int{"b": 2}
		m3 := map[string]int{"c": 3}
		seq := From(m1, m2, m3)
		async := Async(seq, 3)
		result := toSlice(async)
		assert.Equal(t, []map[string]int{m1, m2, m3}, result)
	})
}
// TestAsync_WithChainedOperations tests Async composed with other sequence
// operations (map, filter, chain) both before and after the async hop.
func TestAsync_WithChainedOperations(t *testing.T) {
	t.Run("async after map", func(t *testing.T) {
		seq := From(1, 2, 3, 4, 5)
		mapped := MonadMap(seq, N.Mul(2))
		async := Async(mapped, 5)
		result := toSlice(async)
		assert.Equal(t, []int{2, 4, 6, 8, 10}, result)
	})
	t.Run("map after async", func(t *testing.T) {
		seq := From(1, 2, 3, 4, 5)
		async := Async(seq, 5)
		mapped := MonadMap(async, N.Mul(2))
		result := toSlice(mapped)
		assert.Equal(t, []int{2, 4, 6, 8, 10}, result)
	})
	t.Run("async after filter", func(t *testing.T) {
		seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
		filtered := MonadFilter(seq, func(x int) bool { return x%2 == 0 })
		async := Async(filtered, 5)
		result := toSlice(async)
		assert.Equal(t, []int{2, 4, 6, 8, 10}, result)
	})
	t.Run("filter after async", func(t *testing.T) {
		seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
		async := Async(seq, 5)
		filtered := MonadFilter(async, func(x int) bool { return x%2 == 0 })
		result := toSlice(filtered)
		assert.Equal(t, []int{2, 4, 6, 8, 10}, result)
	})
	t.Run("async after chain", func(t *testing.T) {
		seq := From(1, 2, 3)
		chained := MonadChain(seq, func(x int) Seq[int] {
			return From(x, x*10)
		})
		async := Async(chained, 10)
		result := toSlice(async)
		assert.Equal(t, []int{1, 10, 2, 20, 3, 30}, result)
	})
	t.Run("multiple async operations", func(t *testing.T) {
		// Stacking Async twice must still preserve order.
		seq := From(1, 2, 3, 4, 5)
		async1 := Async(seq, 3)
		async2 := Async(async1, 2)
		result := toSlice(async2)
		assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
	})
}
// TestAsync_Concurrency tests concurrent behavior with slow producers and
// slow consumers.
func TestAsync_Concurrency(t *testing.T) {
	t.Run("allows concurrent production and consumption", func(t *testing.T) {
		// Create a slow producer
		seq := func(yield func(int) bool) {
			for i := range 5 {
				time.Sleep(5 * time.Millisecond)
				if !yield(i) {
					return
				}
			}
		}
		async := Async(seq, 10)
		result := toSlice(async)
		// Verify all elements are produced correctly
		assert.Equal(t, []int{0, 1, 2, 3, 4}, result)
	})
	t.Run("handles concurrent consumption safely", func(t *testing.T) {
		seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
		async := Async(seq, 5)
		// Consume with some processing time
		var sum atomic.Int32
		for v := range async {
			sum.Add(int32(v))
			time.Sleep(1 * time.Millisecond)
		}
		// 55 == sum of 1..10
		assert.Equal(t, int32(55), sum.Load())
	})
}
// TestAsync_EdgeCases tests edge cases: extreme buffer sizes and sequences
// built via Replicate/MakeBy.
func TestAsync_EdgeCases(t *testing.T) {
	t.Run("very large buffer size", func(t *testing.T) {
		seq := From(1, 2, 3)
		async := Async(seq, 1000000)
		result := toSlice(async)
		assert.Equal(t, []int{1, 2, 3}, result)
	})
	t.Run("buffer size of 1", func(t *testing.T) {
		seq := From(1, 2, 3, 4, 5)
		async := Async(seq, 1)
		result := toSlice(async)
		assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
	})
	t.Run("works with replicate", func(t *testing.T) {
		seq := Replicate(5, 42)
		async := Async(seq, 3)
		result := toSlice(async)
		assert.Equal(t, []int{42, 42, 42, 42, 42}, result)
	})
	t.Run("works with makeBy", func(t *testing.T) {
		seq := MakeBy(5, func(i int) int { return i * i })
		async := Async(seq, 3)
		result := toSlice(async)
		assert.Equal(t, []int{0, 1, 4, 9, 16}, result)
	})
}
// Benchmark tests
// BenchmarkAsync measures the baseline cost of draining a small async
// sequence with a mid-sized buffer.
func BenchmarkAsync(b *testing.B) {
	seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
	b.ResetTimer()
	for range b.N {
		async := Async(seq, 5)
		for range async {
		}
	}
}
// BenchmarkAsync_LargeSequence measures throughput over 1000 elements.
func BenchmarkAsync_LargeSequence(b *testing.B) {
	data := make([]int, 1000)
	for i := range data {
		data[i] = i
	}
	seq := From(data...)
	b.ResetTimer()
	for range b.N {
		async := Async(seq, 100)
		for range async {
		}
	}
}
// BenchmarkAsync_SmallBuffer stresses the near-synchronous case (buffer 1).
func BenchmarkAsync_SmallBuffer(b *testing.B) {
	seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
	b.ResetTimer()
	for range b.N {
		async := Async(seq, 1)
		for range async {
		}
	}
}
// BenchmarkAsync_LargeBuffer measures the fully decoupled case.
func BenchmarkAsync_LargeBuffer(b *testing.B) {
	seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
	b.ResetTimer()
	for range b.N {
		async := Async(seq, 100)
		for range async {
		}
	}
}
// BenchmarkAsync_Unbuffered measures the rendezvous (bufSize 0) case.
func BenchmarkAsync_Unbuffered(b *testing.B) {
	seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
	b.ResetTimer()
	for range b.N {
		async := Async(seq, 0)
		for range async {
		}
	}
}
// BenchmarkAsync_WithMap measures Async composed with a downstream map.
func BenchmarkAsync_WithMap(b *testing.B) {
	seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
	b.ResetTimer()
	for range b.N {
		async := Async(seq, 5)
		mapped := MonadMap(async, N.Mul(2))
		for range mapped {
		}
	}
}
// BenchmarkAsync_WithFilter measures Async composed with a downstream filter.
func BenchmarkAsync_WithFilter(b *testing.B) {
	seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
	b.ResetTimer()
	for range b.N {
		async := Async(seq, 5)
		filtered := MonadFilter(async, func(x int) bool { return x%2 == 0 })
		for range filtered {
		}
	}
}
// Example tests for documentation
func ExampleAsync() {
	seq := From(1, 2, 3, 4, 5)
	async := Async(seq, 10)
	for v := range async {
		fmt.Printf("%d ", v)
	}
	// Output: 1 2 3 4 5
}
func ExampleAsync_unbuffered() {
	seq := From(1, 2, 3)
	async := Async(seq, 0)
	for v := range async {
		fmt.Printf("%d ", v)
	}
	// Output: 1 2 3
}
func ExampleAsync_earlyTermination() {
	seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
	async := Async(seq, 5)
	count := 0
	for v := range async {
		fmt.Printf("%d ", v)
		count++
		if count >= 3 {
			break
		}
	}
	// Output: 1 2 3
}
func ExampleAsync_withMap() {
	seq := From(1, 2, 3, 4, 5)
	async := Async(seq, 5)
	doubled := MonadMap(async, N.Mul(2))
	for v := range doubled {
		fmt.Printf("%d ", v)
	}
	// Output: 2 4 6 8 10
}
func ExampleAsync_withFilter() {
	seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
	async := Async(seq, 5)
	evens := MonadFilter(async, func(x int) bool { return x%2 == 0 })
	for v := range evens {
		fmt.Printf("%d ", v)
	}
	// Output: 2 4 6 8 10
}
// TestAsync2_Success tests basic Async2 functionality.
//
// It covers small, single-pair, and large key-value sequences and verifies
// that both pairing and iteration order survive the async hop.
func TestAsync2_Success(t *testing.T) {
	t.Run("converts Seq2 to async with buffer", func(t *testing.T) {
		seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
		async := Async2(seq, 10)
		result := toMap(async)
		expected := map[int]string{1: "a", 2: "b", 3: "c"}
		assert.Equal(t, expected, result)
	})
	t.Run("preserves key-value pairs order", func(t *testing.T) {
		seq := MonadZip(From("x", "y", "z"), From(10, 20, 30))
		async := Async2(seq, 5)
		keys := []string{}
		values := []int{}
		for k, v := range async {
			keys = append(keys, k)
			values = append(values, v)
		}
		assert.Equal(t, []string{"x", "y", "z"}, keys)
		assert.Equal(t, []int{10, 20, 30}, values)
	})
	t.Run("works with single pair", func(t *testing.T) {
		seq := Of2("key", 42)
		async := Async2(seq, 1)
		result := toMap(async)
		assert.Equal(t, map[string]int{"key": 42}, result)
	})
	t.Run("works with large Seq2", func(t *testing.T) {
		keys := make([]int, 100)
		values := make([]string, 100)
		for i := range keys {
			keys[i] = i
			values[i] = fmt.Sprintf("val%d", i)
		}
		seq := MonadZip(From(keys...), From(values...))
		async := Async2(seq, 20)
		result := toMap(async)
		// assert.Len is the idiomatic testify check for collection length
		// (was: assert.Equal(t, 100, len(result))).
		assert.Len(t, result, 100)
		for i := range 100 {
			assert.Equal(t, fmt.Sprintf("val%d", i), result[i])
		}
	})
}
// TestAsync2_BufferSizes tests different buffer sizes for Async2, including
// the unbuffered and negative cases.
func TestAsync2_BufferSizes(t *testing.T) {
	t.Run("unbuffered channel (bufSize 0)", func(t *testing.T) {
		seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
		async := Async2(seq, 0)
		result := toMap(async)
		expected := map[int]string{1: "a", 2: "b", 3: "c"}
		assert.Equal(t, expected, result)
	})
	t.Run("negative buffer size treated as 0", func(t *testing.T) {
		seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
		async := Async2(seq, -5)
		result := toMap(async)
		expected := map[int]string{1: "a", 2: "b", 3: "c"}
		assert.Equal(t, expected, result)
	})
	t.Run("large buffer", func(t *testing.T) {
		seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
		async := Async2(seq, 100)
		result := toMap(async)
		expected := map[int]string{1: "a", 2: "b", 3: "c"}
		assert.Equal(t, expected, result)
	})
}
// TestAsync2_Empty tests Async2 with empty sequences.
func TestAsync2_Empty(t *testing.T) {
	t.Run("empty Seq2", func(t *testing.T) {
		seq := MonadZip(Empty[int](), Empty[string]())
		async := Async2(seq, 5)
		result := toMap(async)
		assert.Empty(t, result)
	})
}
// TestAsync2_EarlyTermination tests that Async2 properly handles early
// termination when the consumer breaks out of the range loop.
func TestAsync2_EarlyTermination(t *testing.T) {
	t.Run("stops producer when consumer breaks", func(t *testing.T) {
		seq := MonadZip(From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), From("a", "b", "c", "d", "e", "f", "g", "h", "i", "j"))
		async := Async2(seq, 5)
		count := 0
		for range async {
			count++
			if count >= 3 {
				break
			}
		}
		assert.Equal(t, 3, count)
	})
}
// TestAsync2_WithChainedOperations tests Async2 with other operations
// (mapping with key before the async hop).
func TestAsync2_WithChainedOperations(t *testing.T) {
	t.Run("async2 after map", func(t *testing.T) {
		seq := MonadZip(From(1, 2, 3), From(10, 20, 30))
		mapped := MonadMapWithKey(seq, func(k, v int) int { return k + v })
		async := Async2(mapped, 5)
		result := toMap(async)
		expected := map[int]int{1: 11, 2: 22, 3: 33}
		assert.Equal(t, expected, result)
	})
}
// TestToSeqPair_Success tests basic ToSeqPair functionality: each (k, v)
// of the Seq2 becomes one Pair in the resulting Seq, in order.
func TestToSeqPair_Success(t *testing.T) {
	t.Run("converts Seq2 to Seq of Pairs", func(t *testing.T) {
		seq2 := MonadZip(From(1, 2, 3), From("a", "b", "c"))
		pairs := ToSeqPair(seq2)
		result := toSlice(pairs)
		assert.Equal(t, 3, len(result))
		assert.Equal(t, 1, pair.Head(result[0]))
		assert.Equal(t, "a", pair.Tail(result[0]))
		assert.Equal(t, 2, pair.Head(result[1]))
		assert.Equal(t, "b", pair.Tail(result[1]))
		assert.Equal(t, 3, pair.Head(result[2]))
		assert.Equal(t, "c", pair.Tail(result[2]))
	})
	t.Run("preserves order", func(t *testing.T) {
		seq2 := MonadZip(From("x", "y", "z"), From(10, 20, 30))
		pairs := ToSeqPair(seq2)
		result := toSlice(pairs)
		assert.Equal(t, 3, len(result))
		for i, p := range result {
			// keys are consecutive runes starting at 'x', values 10, 20, 30
			expectedKey := string(rune('x' + i))
			expectedVal := (i + 1) * 10
			assert.Equal(t, expectedKey, pair.Head(p))
			assert.Equal(t, expectedVal, pair.Tail(p))
		}
	})
	t.Run("works with single pair", func(t *testing.T) {
		seq2 := Of2("key", 42)
		pairs := ToSeqPair(seq2)
		result := toSlice(pairs)
		assert.Equal(t, 1, len(result))
		assert.Equal(t, "key", pair.Head(result[0]))
		assert.Equal(t, 42, pair.Tail(result[0]))
	})
}
// TestToSeqPair_Empty tests ToSeqPair with empty sequences.
func TestToSeqPair_Empty(t *testing.T) {
	t.Run("empty Seq2 produces empty Seq", func(t *testing.T) {
		seq2 := MonadZip(Empty[int](), Empty[string]())
		pairs := ToSeqPair(seq2)
		result := toSlice(pairs)
		assert.Empty(t, result)
	})
}
// TestToSeqPair_WithComplexTypes tests ToSeqPair with complex types.
func TestToSeqPair_WithComplexTypes(t *testing.T) {
	type Person struct {
		Name string
		Age  int
	}
	t.Run("works with struct values", func(t *testing.T) {
		seq2 := MonadZip(
			From(1, 2, 3),
			From(Person{"Alice", 30}, Person{"Bob", 25}, Person{"Charlie", 35}),
		)
		pairs := ToSeqPair(seq2)
		result := toSlice(pairs)
		assert.Equal(t, 3, len(result))
		assert.Equal(t, 1, pair.Head(result[0]))
		assert.Equal(t, Person{"Alice", 30}, pair.Tail(result[0]))
	})
}
// TestFromSeqPair_Success tests basic FromSeqPair functionality: each Pair
// in the Seq is unpacked into a (k, v) of the resulting Seq2, in order.
func TestFromSeqPair_Success(t *testing.T) {
	t.Run("converts Seq of Pairs to Seq2", func(t *testing.T) {
		pairs := From(
			pair.MakePair(1, "a"),
			pair.MakePair(2, "b"),
			pair.MakePair(3, "c"),
		)
		seq2 := FromSeqPair(pairs)
		result := toMap(seq2)
		expected := map[int]string{1: "a", 2: "b", 3: "c"}
		assert.Equal(t, expected, result)
	})
	t.Run("preserves order", func(t *testing.T) {
		pairs := From(
			pair.MakePair("x", 10),
			pair.MakePair("y", 20),
			pair.MakePair("z", 30),
		)
		seq2 := FromSeqPair(pairs)
		keys := []string{}
		values := []int{}
		for k, v := range seq2 {
			keys = append(keys, k)
			values = append(values, v)
		}
		assert.Equal(t, []string{"x", "y", "z"}, keys)
		assert.Equal(t, []int{10, 20, 30}, values)
	})
	t.Run("works with single pair", func(t *testing.T) {
		pairs := From(pair.MakePair("key", 42))
		seq2 := FromSeqPair(pairs)
		result := toMap(seq2)
		assert.Equal(t, map[string]int{"key": 42}, result)
	})
}
// TestFromSeqPair_Empty tests FromSeqPair with empty sequences.
func TestFromSeqPair_Empty(t *testing.T) {
	t.Run("empty Seq produces empty Seq2", func(t *testing.T) {
		pairs := Empty[Pair[int, string]]()
		seq2 := FromSeqPair(pairs)
		result := toMap(seq2)
		assert.Empty(t, result)
	})
}
// TestFromSeqPair_WithComplexTypes tests FromSeqPair with complex types.
func TestFromSeqPair_WithComplexTypes(t *testing.T) {
	type Person struct {
		Name string
		Age  int
	}
	t.Run("works with struct values", func(t *testing.T) {
		pairs := From(
			pair.MakePair(1, Person{"Alice", 30}),
			pair.MakePair(2, Person{"Bob", 25}),
			pair.MakePair(3, Person{"Charlie", 35}),
		)
		seq2 := FromSeqPair(pairs)
		result := toMap(seq2)
		expected := map[int]Person{
			1: {"Alice", 30},
			2: {"Bob", 25},
			3: {"Charlie", 35},
		}
		assert.Equal(t, expected, result)
	})
}
// TestRoundTrip tests that ToSeqPair and FromSeqPair are inverses of each
// other in both composition orders.
func TestRoundTrip(t *testing.T) {
	t.Run("ToSeqPair then FromSeqPair", func(t *testing.T) {
		original := MonadZip(From(1, 2, 3), From("a", "b", "c"))
		pairs := ToSeqPair(original)
		restored := FromSeqPair(pairs)
		result := toMap(restored)
		expected := map[int]string{1: "a", 2: "b", 3: "c"}
		assert.Equal(t, expected, result)
	})
	t.Run("FromSeqPair then ToSeqPair", func(t *testing.T) {
		original := From(
			pair.MakePair(1, "a"),
			pair.MakePair(2, "b"),
			pair.MakePair(3, "c"),
		)
		seq2 := FromSeqPair(original)
		restored := ToSeqPair(seq2)
		result := toSlice(restored)
		assert.Equal(t, 3, len(result))
		assert.Equal(t, 1, pair.Head(result[0]))
		assert.Equal(t, "a", pair.Tail(result[0]))
	})
}
// Benchmark tests for Async2
// BenchmarkAsync2 measures the baseline cost of draining a small async
// key-value sequence.
func BenchmarkAsync2(b *testing.B) {
	seq := MonadZip(From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), From("a", "b", "c", "d", "e", "f", "g", "h", "i", "j"))
	b.ResetTimer()
	for range b.N {
		async := Async2(seq, 5)
		for range async {
		}
	}
}
// BenchmarkAsync2_LargeSequence measures throughput over 1000 pairs.
func BenchmarkAsync2_LargeSequence(b *testing.B) {
	keys := make([]int, 1000)
	values := make([]string, 1000)
	for i := range keys {
		keys[i] = i
		values[i] = fmt.Sprintf("val%d", i)
	}
	seq := MonadZip(From(keys...), From(values...))
	b.ResetTimer()
	for range b.N {
		async := Async2(seq, 100)
		for range async {
		}
	}
}
// Benchmark tests for FromSeqPair
func BenchmarkFromSeqPair(b *testing.B) {
	pairs := From(
		pair.MakePair(1, "a"),
		pair.MakePair(2, "b"),
		pair.MakePair(3, "c"),
		pair.MakePair(4, "d"),
		pair.MakePair(5, "e"),
	)
	b.ResetTimer()
	for range b.N {
		seq2 := FromSeqPair(pairs)
		for range seq2 {
		}
	}
}
// BenchmarkRoundTrip measures the combined ToSeqPair + FromSeqPair cost.
func BenchmarkRoundTrip(b *testing.B) {
	seq := MonadZip(From(1, 2, 3, 4, 5), From("a", "b", "c", "d", "e"))
	b.ResetTimer()
	for range b.N {
		pairs := ToSeqPair(seq)
		restored := FromSeqPair(pairs)
		for range restored {
		}
	}
}
// Example tests for Async2
func ExampleAsync2() {
	seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
	async := Async2(seq, 10)
	for k, v := range async {
		fmt.Printf("%d: %s\n", k, v)
	}
	// Output:
	// 1: a
	// 2: b
	// 3: c
}
func ExampleAsync2_earlyTermination() {
	seq := MonadZip(From(1, 2, 3, 4, 5), From("a", "b", "c", "d", "e"))
	async := Async2(seq, 5)
	count := 0
	for k, v := range async {
		fmt.Printf("%d: %s\n", k, v)
		count++
		if count >= 2 {
			break
		}
	}
	// Output:
	// 1: a
	// 2: b
}
// Example tests for FromSeqPair
func ExampleFromSeqPair() {
	pairs := From(
		pair.MakePair(1, "a"),
		pair.MakePair(2, "b"),
		pair.MakePair(3, "c"),
	)
	seq2 := FromSeqPair(pairs)
	for k, v := range seq2 {
		fmt.Printf("%d: %s\n", k, v)
	}
	// Output:
	// 1: a
	// 2: b
	// 3: c
}

View File

@@ -1002,3 +1002,80 @@ func ToSeqPair[A, B any](as Seq2[A, B]) Seq[Pair[A, B]] {
}
}
}
// FromSeqPair converts a sequence of Pairs into a key-value sequence.
//
// It is the inverse of ToSeqPair: every Pair[A, B] yielded by the input
// Seq is unpacked and emitted as a separate (key, value) step of the
// resulting Seq2. Iteration stops as soon as the consumer's yield
// function returns false.
//
// # Type Parameters
//
//   - A: the type of the first element (key) of each pair
//   - B: the type of the second element (value) of each pair
//
// # Example
//
//	pairs := From(
//		pair.MakePair("a", 1),
//		pair.MakePair("b", 2),
//	)
//	for k, v := range FromSeqPair(pairs) {
//		fmt.Printf("%s: %d\n", k, v)
//	}
//	// Output:
//	// a: 1
//	// b: 2
//
// # See Also
//
//   - ToSeqPair: the inverse conversion (Seq2 to Seq of Pairs)
//   - pair.MakePair / pair.Unpack: pair construction and destructuring
func FromSeqPair[A, B any](as Seq[Pair[A, B]]) Seq2[A, B] {
	return func(yield func(A, B) bool) {
		for p := range as {
			// Destructure the pair and forward it; stop on consumer request.
			a, b := pair.Unpack(p)
			if !yield(a, b) {
				return
			}
		}
	}
}

View File

@@ -115,10 +115,7 @@ func Inc[T Number](value T) T {
// result := Min(5, 10) // returns 5
// result := Min(3.14, 2.71) // returns 2.71
func Min[A C.Ordered](a, b A) A {
if a < b {
return a
}
return b
return min(a, b)
}
// Max returns the maximum of two ordered values.
@@ -132,10 +129,7 @@ func Min[A C.Ordered](a, b A) A {
// result := Max(5, 10) // returns 10
// result := Max(3.14, 2.71) // returns 3.14
func Max[A C.Ordered](a, b A) A {
if a > b {
return a
}
return b
return max(a, b)
}
// MoreThan is a curried comparison function that checks if a value is more than (greater than) another.