1
0
mirror of https://github.com/IBM/fp-go.git synced 2026-04-20 20:15:44 +02:00

Compare commits

...

3 Commits

Author SHA1 Message Date
Eliah Rusin b288003f93 doc: add fp-go/v2 LLM reference guide (#160)
Multi-layered documentation system for Claude Code sessions:
- fp-go-claude.md: compact CLAUDE.md snippet (260 lines)
- fp-go-cookbook.md: 20 migration recipes (1198 lines)
- fp-go-core-patterns.md: core types and composition (1598 lines)
- fp-go-mastery.md: advanced FP techniques (1437 lines)
- fp-go-full-reference.md: complete API inventory, 61 packages (6080 lines)

All type signatures extracted from actual source code.

Signed-off-by: franchb <hello@franchb.com>
2026-04-14 10:51:26 +02:00
Dr. Carsten Leue aed19f6edf fix: implement Merge operations for iterators
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-04-14 10:47:20 +02:00
Dr. Carsten Leue 45cc0a7fc1 fix: better traversal support
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-04-10 17:24:08 +02:00
37 changed files with 16342 additions and 166 deletions
+5
View File
@@ -1510,3 +1510,8 @@ func Extend[A, B any](f func([]A) B) Operator[A, B] {
// Extract returns a single value derived from the slice by delegating to
// the generic implementation G.Extract.
// NOTE(review): the exact semantics (presumably the head element, per the
// comonadic Extract convention) and the behavior on an empty slice are
// defined in G.Extract and are not visible from this wrapper — confirm there.
func Extract[A any](as []A) A {
	return G.Extract(as)
}
// UpdateAt returns a function that replaces the element at index i of a
// slice with v, yielding Some of a new slice on success and None when i is
// out of bounds. The input slice is never mutated: the generic
// implementation performs the update on a clone. Delegates to G.UpdateAt.
//
//go:inline
func UpdateAt[T any](i int, v T) func([]T) Option[[]T] {
	return G.UpdateAt[[]T](i, v)
}
+13
View File
@@ -489,3 +489,16 @@ func Extend[GA ~[]A, GB ~[]B, A, B any](f func(GA) B) func(GA) GB {
return MakeBy[GB](len(as), func(i int) B { return f(as[i:]) })
}
}
// UpdateAt returns a function that, given a slice, produces Some of a new
// slice with the element at index i replaced by v, or None when i is out of
// bounds. The input slice is left untouched; the replacement happens on a
// copy via array.UnsafeUpdateAt.
func UpdateAt[GT ~[]T, T any](i int, v T) func(GT) O.Option[GT] {
	// A negative index can never be valid, so short-circuit with a
	// constant None-producing function.
	if i < 0 {
		return F.Constant1[GT](O.None[GT]())
	}
	return func(as GT) O.Option[GT] {
		if i < len(as) {
			return O.Of(array.UnsafeUpdateAt(as, i, v))
		}
		return O.None[GT]()
	}
}
+260
View File
@@ -0,0 +1,260 @@
# fp-go/v2 Reference for Claude Code
fp-go/v2 (`github.com/IBM/fp-go/v2`) is a typed functional programming library for Go 1.24+. It provides Option, Either/Result, IO, and Effect monads with data-last, curried APIs designed for pipeline composition via `Pipe` and `Flow`. The library follows Haskell/fp-ts conventions adapted for Go generics with explicit arity-numbered functions (e.g., `Pipe3`, `Flow2`). Two module families exist: **standard** (struct-based monads, full FP toolkit) and **idiomatic** (Go-native `(value, error)` tuples, zero-alloc, 2-32x faster).
## Import Conventions
| Alias | Package |
|-------|---------|
| `F` | `github.com/IBM/fp-go/v2/function` |
| `O` | `github.com/IBM/fp-go/v2/option` |
| `E` | `github.com/IBM/fp-go/v2/either` |
| `R` | `github.com/IBM/fp-go/v2/result` |
| `A` | `github.com/IBM/fp-go/v2/array` |
| `IO` | `github.com/IBM/fp-go/v2/io` |
| `IOR` | `github.com/IBM/fp-go/v2/ioresult` |
| `IOE` | `github.com/IBM/fp-go/v2/ioeither` |
| `RIO` | `github.com/IBM/fp-go/v2/context/readerioresult` |
| `EFF` | `github.com/IBM/fp-go/v2/effect` |
| `P` | `github.com/IBM/fp-go/v2/pair` |
| `T` | `github.com/IBM/fp-go/v2/tuple` |
| `N` | `github.com/IBM/fp-go/v2/number` |
| `S` | `github.com/IBM/fp-go/v2/string` |
| `B` | `github.com/IBM/fp-go/v2/boolean` |
| `L` | `github.com/IBM/fp-go/v2/optics/lens` |
| `PR` | `github.com/IBM/fp-go/v2/optics/prism` |
**Idiomatic variants** (tuple-based, zero-alloc):
| Alias | Package |
|-------|---------|
| `IR` | `github.com/IBM/fp-go/v2/idiomatic/result` |
| `IO_` | `github.com/IBM/fp-go/v2/idiomatic/option` |
| `IIR` | `github.com/IBM/fp-go/v2/idiomatic/ioresult` |
| `IRR` | `github.com/IBM/fp-go/v2/idiomatic/context/readerresult` |
| `IRO` | `github.com/IBM/fp-go/v2/idiomatic/readerioresult` |
## Monad Selection
- **Pure value** -- use the value directly, no wrapper needed
- **May be absent** -- `Option[A]` (struct-based) or `(A, bool)` (idiomatic)
- **Can fail with `error`** -- `Result[A]` = `Either[error, A]`
- Need custom error type E -- use `Either[E, A]` instead
- **Lazy + can fail** -- `IOResult[A]` = `func() Either[error, A]`
- Idiomatic: `func() (A, error)`
- **Needs `context.Context` + lazy + can fail** -- `ReaderIOResult[A]` via `context/readerioresult`
- Type: `func(context.Context) func() Either[error, A]`
- Idiomatic: `func(context.Context) (A, error)` via `idiomatic/context/readerresult`
- **Typed DI + context + lazy + can fail** -- `Effect[C, A]` via `effect` package
- Type: `func(C) func(context.Context) func() Either[error, A]`
- C is your dependency/config struct; context.Context is handled internally
- **Performance-critical** -- prefer `idiomatic/` variants throughout
## Standard vs Idiomatic
| Aspect | Standard | Idiomatic |
|--------|----------|-----------|
| Representation | `Either[error, A]` struct | `(A, error)` tuple |
| Performance | Baseline | 2-32x faster, zero allocs |
| Custom error types | `Either[E, A]` for any E | error only |
| Do-notation | Full support | Full support |
| FP toolkit | Complete | Complete |
| Go interop | Requires `Unwrap`/`Eitherize` | Native `(val, err)` |
**Rule of thumb**: Use idiomatic for production code and hot paths. Use standard when you need custom error types (`Either[E, A]`) or when composing with packages that use the standard types.
## Core Types
```go
// function package
type Void = struct{}
var VOID Void = struct{}{}
// option
type Option[A any] struct { /* Some/None */ }
// either
type Either[E, A any] struct { /* Left/Right */ }
// result (specialized Either)
type Result[A any] = Either[error, A]
// io
type IO[A any] = func() A
// ioresult
type IOResult[A any] = IO[Result[A]] // = func() Either[error, A]
// context/readerioresult
type ReaderIOResult[A any] = func(context.Context) func() Either[error, A]
// effect
type Effect[C, A any] = func(C) func(context.Context) func() Either[error, A]
type Kleisli[C, A, B any] = func(A) Effect[C, B]
// idiomatic equivalents
type IOResult[A any] = func() (A, error)
type ReaderResult[A any] = func(context.Context) (A, error)
```
## Key Rules
1. **Data-last**: Configuration/behavior params come first, data comes last. This enables partial application and pipeline composition.
2. **Type parameter ordering**: Non-inferrable type params come first. Example: `Ap[B, E, A]` -- B cannot be inferred, so it leads. `Map[A, B]` -- both usually inferred.
3. **Composition direction**:
- `F.Flow1/2/3/.../N` -- left-to-right (use this for pipelines)
- `Compose` -- right-to-left (mathematical convention; avoid in pipelines)
4. **Pipe vs Flow**:
- `F.Pipe3(value, f1, f2, f3)` -- apply data to a pipeline immediately
- `F.Flow3(f1, f2, f3)` -- create a reusable pipeline (returns a function)
5. **Arity-numbered functions**: `Pipe1` through `Pipe20`, `Flow1` through `Flow20`. Choose the number matching your operation count.
6. **Naming conventions**:
- `Chain` = flatMap/bind (`A -> F[B]`, flattens)
- `Map` = fmap (`A -> B`, lifts into context)
- `Ap` = applicative apply (apply wrapped function to wrapped value)
- `ChainFirst` / `Tap` = execute for side effects, keep original value
- `ChainEitherK` = lift pure `func(A) Either[E, B]` into monadic chain
- `Of` = pure/return (lift value into monad)
- `Fold` = catamorphism (handle both cases)
- `Left` / `Right` = Either constructors
- `Some` / `None` = Option constructors
7. **Prefer `result` over `either`** unless you need a custom error type E. `Result[A]` = `Either[error, A]`.
8. **Wrapping Go functions**:
- `result.Eitherize1(fn)` wraps `func(X) (Y, error)` into `func(X) Result[Y]`
- `result.Eitherize2(fn)` wraps `func(X, Y) (Z, error)` into `func(X, Y) Result[Z]`
- Variants up to `Eitherize15`
9. **Use `function.Void` / `function.VOID`** instead of `struct{}` / `struct{}{}`.
10. **Go 1.24+ required** (generic type aliases).
## Common Patterns
### Pipeline with Pipe
```go
result := F.Pipe3(
inputValue,
R.Map(transform),
R.Chain(validate),
R.Fold(onError, onSuccess),
)
```
### Reusable pipeline with Flow
```go
pipeline := F.Flow3(
R.Map(normalize),
R.Chain(validate),
R.Map(format),
)
output := pipeline(R.Of(input))
```
### Wrapping Go error functions
```go
safeParseInt := R.Eitherize1(strconv.Atoi)
// safeParseInt: func(string) Result[int]
result := safeParseInt("42") // Right(42)
```
### Effect with DI
```go
type Deps struct { DB *sql.DB }
fetchUser := EFF.Eitherize(func(deps Deps, ctx context.Context) (*User, error) {
return deps.DB.QueryRowContext(ctx, "SELECT ...").Scan(...)
})
// fetchUser: Effect[Deps, *User]
// Execute:
val, err := EFF.RunSync(EFF.Provide[*User](myDeps)(fetchUser))(ctx)
```
### Effect composition
```go
pipeline := F.Pipe1(
fetchUser,
EFF.Map[Deps](func(u *User) string { return u.Name }),
)
```
### Do-notation (building up state)
```go
type State struct { X int; Y string }
result := F.Pipe3(
R.Do(State{}),
R.Bind(
func(x int) func(State) State {
return func(s State) State { s.X = x; return s }
},
func(s State) Result[int] { return R.Of(42) },
),
R.Let(
func(y string) func(State) State {
return func(s State) State { s.Y = y; return s }
},
func(s State) string { return fmt.Sprintf("val=%d", s.X) },
),
)
```
### Optics (Lens)
```go
type Person struct { Name string; Age int }
nameLens := L.MakeLens(
func(p Person) string { return p.Name },
func(p Person, name string) Person { p.Name = name; return p },
)
name := nameLens.Get(person) // get
updated := nameLens.Set("Bob")(person) // set (returns new Person)
modified := L.Modify(strings.ToUpper)(nameLens)(person) // modify
```
### Option handling
```go
result := F.Pipe3(
O.Some(42),
O.Map(func(x int) int { return x * 2 }),
O.GetOrElse(F.Constant(0)),
)
```
### Idiomatic IOResult
```go
readFile := func() ([]byte, error) { return os.ReadFile("config.json") }
// This IS an idiomatic IOResult[[]byte] -- just a func() ([]byte, error)
parsed := IIR.Map(parseConfig)(readFile)
config, err := parsed()
```
### ReaderIOResult (context-dependent IO)
```go
// Eitherize1 wraps func(context.Context, T0) (R, error) -> func(T0) ReaderIOResult[R]
fetchURL := RIO.Eitherize1(func(ctx context.Context, url string) ([]byte, error) {
req, _ := http.NewRequestWithContext(ctx, "GET", url, nil)
resp, err := http.DefaultClient.Do(req)
if err != nil { return nil, err }
defer resp.Body.Close()
return iolib.ReadAll(resp.Body)
})
// fetchURL: func(string) ReaderIOResult[[]byte]
result := fetchURL("https://example.com")(ctx)() // execute
```
## Deeper Documentation
- `fp-go-cookbook.md` -- migration recipes and "how do I X in fp-go?"
- `fp-go-core-patterns.md` -- core types, operations, and composition details
- `fp-go-mastery.md` -- advanced FP techniques, architecture, and Effect system
- `fp-go-full-reference.md` -- complete API inventory across all packages
File diff suppressed because it is too large Load Diff
File diff suppressed because it is too large Load Diff
File diff suppressed because it is too large Load Diff
File diff suppressed because it is too large Load Diff
+8
View File
@@ -15,6 +15,8 @@
package array
import "slices"
// Of wraps a single value into a one-element slice of the (possibly named)
// slice type GA.
func Of[GA ~[]A, A any](a A) GA {
	return GA{a}
}
@@ -197,3 +199,9 @@ func Reverse[GT ~[]T, T any](as GT) GT {
}
return ras
}
func UnsafeUpdateAt[GT ~[]T, T any](as GT, i int, v T) GT {
c := slices.Clone(as)
c[i] = v
return c
}
+92 -33
View File
@@ -16,10 +16,10 @@
package iter
import (
N "github.com/IBM/fp-go/v2/number"
A "github.com/IBM/fp-go/v2/array"
)
// Async converts a synchronous sequence into an asynchronous buffered sequence.
// AsyncBuf converts a synchronous sequence into an asynchronous buffered sequence.
// It spawns a goroutine to consume the input sequence and sends values through
// a buffered channel, allowing concurrent production and consumption of elements.
//
@@ -57,7 +57,7 @@ import (
//
// // Create an async sequence with a buffer of 10
// seq := From(1, 2, 3, 4, 5)
// async := Async(seq, 10)
// async := AsyncBuf(seq, 10)
//
// // Elements are produced concurrently
// for v := range async {
@@ -67,7 +67,7 @@ import (
// # Example with Early Termination
//
// seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
// async := Async(seq, 5)
// async := AsyncBuf(seq, 5)
//
// // Stop after 3 elements - producer goroutine will be properly cleaned up
// count := 0
@@ -83,7 +83,7 @@ import (
//
// // bufSize of 0 creates an unbuffered channel
// seq := From(1, 2, 3)
// async := Async(seq, 0)
// async := AsyncBuf(seq, 0)
//
// // Producer and consumer are synchronized
// for v := range async {
@@ -95,32 +95,48 @@ import (
// - From: Creates a sequence from values
// - Map: Transforms sequence elements
// - Filter: Filters sequence elements
func Async[T any](input Seq[T], bufSize int) Seq[T] {
return func(yield func(T) bool) {
ch := make(chan T, N.Max(bufSize, 0))
done := make(chan Void)
go func() {
defer close(ch)
for v := range input {
select {
case ch <- v:
case <-done:
return
}
}
}()
defer close(done)
for v := range ch {
if !yield(v) {
return
}
}
}
// AsyncBuf converts a synchronous sequence into an asynchronous buffered
// one by delegating to MergeBuf with a single-element collection of input
// sequences (A.Of(input)) and the given channel buffer size.
func AsyncBuf[T any](input Seq[T], bufSize int) Seq[T] {
	return MergeBuf(A.Of(input), bufSize)
}
// Async2 converts a synchronous key-value sequence into an asynchronous buffered sequence.
// Async converts a synchronous sequence into an asynchronous sequence using a default buffer size.
// This is a convenience wrapper around AsyncBuf that uses the package-level defaultBufferSize.
//
// Type Parameters:
//   - T: The type of elements in the sequence
//
// Parameters:
//   - input: The source sequence to be consumed asynchronously
//
// Returns:
//   - Seq[T]: A new sequence that yields elements from the input sequence asynchronously
//
// Behavior:
//   - Uses defaultBufferSize for the internal channel buffer
//   - Spawns a goroutine that consumes the input sequence
//   - Elements are sent through a buffered channel to the output sequence
//   - Properly handles early termination with goroutine cleanup
//   - The channel is closed when the input sequence is exhausted
//
// Example:
//
//	seq := From(1, 2, 3, 4, 5)
//	async := Async(seq)
//
//	// Elements are produced concurrently
//	for v := range async {
//		fmt.Println(v) // Prints: 1, 2, 3, 4, 5
//	}
//
// See Also:
//   - AsyncBuf: Async with custom buffer size
//   - Async2: Asynchronous sequence for key-value sequences
//   - Merge: Merges multiple sequences concurrently
func Async[T any](input Seq[T]) Seq[T] {
	return AsyncBuf(input, defaultBufferSize)
}
// Async2Buf converts a synchronous key-value sequence into an asynchronous buffered sequence.
// It spawns a goroutine to consume the input sequence and sends key-value pairs through
// a buffered channel, allowing concurrent production and consumption of elements.
//
@@ -158,7 +174,7 @@ func Async[T any](input Seq[T], bufSize int) Seq[T] {
//
// // Create an async key-value sequence with a buffer of 10
// seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
// async := Async2(seq, 10)
// async := Async2Buf(seq, 10)
//
// // Elements are produced concurrently
// for k, v := range async {
@@ -172,7 +188,7 @@ func Async[T any](input Seq[T], bufSize int) Seq[T] {
// # Example with Early Termination
//
// seq := MonadZip(From(1, 2, 3, 4, 5), From("a", "b", "c", "d", "e"))
// async := Async2(seq, 5)
// async := Async2Buf(seq, 5)
//
// // Stop after 2 pairs - producer goroutine will be properly cleaned up
// count := 0
@@ -190,6 +206,49 @@ func Async[T any](input Seq[T], bufSize int) Seq[T] {
// - ToSeqPair: Converts Seq2 to Seq of Pairs
// - FromSeqPair: Converts Seq of Pairs to Seq2
// - MonadZip: Creates key-value sequences from two sequences
func Async2[K, V any](input Seq2[K, V], bufSize int) Seq2[K, V] {
return FromSeqPair(Async(ToSeqPair(input), bufSize))
// Async2Buf converts a synchronous key-value sequence into an asynchronous
// buffered one by round-tripping through a sequence of pairs: the Seq2 is
// converted with ToSeqPair, made asynchronous via AsyncBuf with the given
// buffer size, and converted back with FromSeqPair.
func Async2Buf[K, V any](input Seq2[K, V], bufSize int) Seq2[K, V] {
	return FromSeqPair(AsyncBuf(ToSeqPair(input), bufSize))
}
// Async2 converts a synchronous key-value sequence into an asynchronous sequence using a default buffer size.
// This is a convenience wrapper around Async2Buf that uses the package-level defaultBufferSize.
// It's the Seq2 variant of Async, providing the same asynchronous behavior for key-value sequences.
//
// Type Parameters:
//   - K: The type of keys in the sequence
//   - V: The type of values in the sequence
//
// Parameters:
//   - input: The source key-value sequence to be consumed asynchronously
//
// Returns:
//   - Seq2[K, V]: A new key-value sequence that yields elements from the input sequence asynchronously
//
// Behavior:
//   - Uses defaultBufferSize for the internal channel buffer
//   - Spawns a goroutine that consumes the input key-value sequence
//   - Key-value pairs are sent through a buffered channel to the output sequence
//   - Properly handles early termination with goroutine cleanup
//   - The channel is closed when the input sequence is exhausted
//
// Example:
//
//	seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
//	async := Async2(seq)
//
//	// Elements are produced concurrently
//	for k, v := range async {
//		fmt.Printf("%d: %s\n", k, v)
//	}
//	// Output:
//	// 1: a
//	// 2: b
//	// 3: c
//
// See Also:
//   - Async2Buf: Async2 with custom buffer size
//   - Async: Asynchronous sequence for single-value sequences
//   - MonadZip: Creates key-value sequences from two sequences
func Async2[K, V any](input Seq2[K, V]) Seq2[K, V] {
	return Async2Buf(input, defaultBufferSize)
}
+79 -81
View File
@@ -30,21 +30,21 @@ import (
func TestAsync_Success(t *testing.T) {
t.Run("converts sequence to async with buffer", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5)
async := Async(seq, 10)
async := AsyncBuf(seq, 10)
result := toSlice(async)
assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
})
t.Run("preserves element order", func(t *testing.T) {
seq := From("a", "b", "c", "d", "e")
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
result := toSlice(async)
assert.Equal(t, []string{"a", "b", "c", "d", "e"}, result)
})
t.Run("works with single element", func(t *testing.T) {
seq := From(42)
async := Async(seq, 1)
async := AsyncBuf(seq, 1)
result := toSlice(async)
assert.Equal(t, []int{42}, result)
})
@@ -55,7 +55,7 @@ func TestAsync_Success(t *testing.T) {
data[i] = i
}
seq := From(data...)
async := Async(seq, 20)
async := AsyncBuf(seq, 20)
result := toSlice(async)
assert.Equal(t, data, result)
})
@@ -65,42 +65,42 @@ func TestAsync_Success(t *testing.T) {
func TestAsync_BufferSizes(t *testing.T) {
t.Run("unbuffered channel (bufSize 0)", func(t *testing.T) {
seq := From(1, 2, 3)
async := Async(seq, 0)
async := AsyncBuf(seq, 0)
result := toSlice(async)
assert.Equal(t, []int{1, 2, 3}, result)
})
t.Run("small buffer", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5)
async := Async(seq, 2)
async := AsyncBuf(seq, 2)
result := toSlice(async)
assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
})
t.Run("large buffer", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5)
async := Async(seq, 100)
async := AsyncBuf(seq, 100)
result := toSlice(async)
assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
})
t.Run("negative buffer size treated as 0", func(t *testing.T) {
seq := From(1, 2, 3)
async := Async(seq, -5)
async := AsyncBuf(seq, -5)
result := toSlice(async)
assert.Equal(t, []int{1, 2, 3}, result)
})
t.Run("buffer size equals sequence length", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5)
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
result := toSlice(async)
assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
})
t.Run("buffer size larger than sequence", func(t *testing.T) {
seq := From(1, 2, 3)
async := Async(seq, 10)
async := AsyncBuf(seq, 10)
result := toSlice(async)
assert.Equal(t, []int{1, 2, 3}, result)
})
@@ -110,21 +110,21 @@ func TestAsync_BufferSizes(t *testing.T) {
func TestAsync_Empty(t *testing.T) {
t.Run("empty integer sequence", func(t *testing.T) {
seq := Empty[int]()
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
result := toSlice(async)
assert.Empty(t, result)
})
t.Run("empty string sequence", func(t *testing.T) {
seq := Empty[string]()
async := Async(seq, 10)
async := AsyncBuf(seq, 10)
result := toSlice(async)
assert.Empty(t, result)
})
t.Run("empty with zero buffer", func(t *testing.T) {
seq := Empty[int]()
async := Async(seq, 0)
async := AsyncBuf(seq, 0)
result := toSlice(async)
assert.Empty(t, result)
})
@@ -145,7 +145,7 @@ func TestAsync_EarlyTermination(t *testing.T) {
}
}
async := Async(seq, 10)
async := AsyncBuf(seq, 10)
// Consume only 5 elements
count := 0
@@ -168,7 +168,7 @@ func TestAsync_EarlyTermination(t *testing.T) {
t.Run("handles yield returning false", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
collected := []int{}
for v := range async {
@@ -183,7 +183,7 @@ func TestAsync_EarlyTermination(t *testing.T) {
t.Run("early termination with unbuffered channel", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5)
async := Async(seq, 0)
async := AsyncBuf(seq, 0)
collected := []int{}
for v := range async {
@@ -210,7 +210,7 @@ func TestAsync_WithComplexTypes(t *testing.T) {
Person{"Bob", 25},
Person{"Charlie", 35},
)
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
result := toSlice(async)
expected := []Person{
{"Alice", 30},
@@ -225,14 +225,14 @@ func TestAsync_WithComplexTypes(t *testing.T) {
p2 := &Person{"Bob", 25}
p3 := &Person{"Charlie", 35}
seq := From(p1, p2, p3)
async := Async(seq, 3)
async := AsyncBuf(seq, 3)
result := toSlice(async)
assert.Equal(t, []*Person{p1, p2, p3}, result)
})
t.Run("works with slices", func(t *testing.T) {
seq := From([]int{1, 2}, []int{3, 4}, []int{5, 6})
async := Async(seq, 2)
async := AsyncBuf(seq, 2)
result := toSlice(async)
expected := [][]int{{1, 2}, {3, 4}, {5, 6}}
assert.Equal(t, expected, result)
@@ -243,7 +243,7 @@ func TestAsync_WithComplexTypes(t *testing.T) {
m2 := map[string]int{"b": 2}
m3 := map[string]int{"c": 3}
seq := From(m1, m2, m3)
async := Async(seq, 3)
async := AsyncBuf(seq, 3)
result := toSlice(async)
assert.Equal(t, []map[string]int{m1, m2, m3}, result)
})
@@ -254,14 +254,14 @@ func TestAsync_WithChainedOperations(t *testing.T) {
t.Run("async after map", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5)
mapped := MonadMap(seq, N.Mul(2))
async := Async(mapped, 5)
async := AsyncBuf(mapped, 5)
result := toSlice(async)
assert.Equal(t, []int{2, 4, 6, 8, 10}, result)
})
t.Run("map after async", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5)
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
mapped := MonadMap(async, N.Mul(2))
result := toSlice(mapped)
assert.Equal(t, []int{2, 4, 6, 8, 10}, result)
@@ -270,14 +270,14 @@ func TestAsync_WithChainedOperations(t *testing.T) {
t.Run("async after filter", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
filtered := MonadFilter(seq, func(x int) bool { return x%2 == 0 })
async := Async(filtered, 5)
async := AsyncBuf(filtered, 5)
result := toSlice(async)
assert.Equal(t, []int{2, 4, 6, 8, 10}, result)
})
t.Run("filter after async", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
filtered := MonadFilter(async, func(x int) bool { return x%2 == 0 })
result := toSlice(filtered)
assert.Equal(t, []int{2, 4, 6, 8, 10}, result)
@@ -288,15 +288,15 @@ func TestAsync_WithChainedOperations(t *testing.T) {
chained := MonadChain(seq, func(x int) Seq[int] {
return From(x, x*10)
})
async := Async(chained, 10)
async := AsyncBuf(chained, 10)
result := toSlice(async)
assert.Equal(t, []int{1, 10, 2, 20, 3, 30}, result)
})
t.Run("multiple async operations", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5)
async1 := Async(seq, 3)
async2 := Async(async1, 2)
async1 := AsyncBuf(seq, 3)
async2 := AsyncBuf(async1, 2)
result := toSlice(async2)
assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
})
@@ -314,26 +314,26 @@ func TestAsync_Concurrency(t *testing.T) {
}
}
}
async := Async(seq, 10)
async := AsyncBuf(seq, 10)
result := toSlice(async)
// Verify all elements are produced correctly
assert.Equal(t, []int{0, 1, 2, 3, 4}, result)
})
t.Run("handles concurrent consumption safely", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
// Consume with some processing time
var sum atomic.Int32
for v := range async {
sum.Add(int32(v))
time.Sleep(1 * time.Millisecond)
}
assert.Equal(t, int32(55), sum.Load())
})
}
@@ -342,28 +342,28 @@ func TestAsync_Concurrency(t *testing.T) {
func TestAsync_EdgeCases(t *testing.T) {
t.Run("very large buffer size", func(t *testing.T) {
seq := From(1, 2, 3)
async := Async(seq, 1000000)
async := AsyncBuf(seq, 1000000)
result := toSlice(async)
assert.Equal(t, []int{1, 2, 3}, result)
})
t.Run("buffer size of 1", func(t *testing.T) {
seq := From(1, 2, 3, 4, 5)
async := Async(seq, 1)
async := AsyncBuf(seq, 1)
result := toSlice(async)
assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
})
t.Run("works with replicate", func(t *testing.T) {
seq := Replicate(5, 42)
async := Async(seq, 3)
async := AsyncBuf(seq, 3)
result := toSlice(async)
assert.Equal(t, []int{42, 42, 42, 42, 42}, result)
})
t.Run("works with makeBy", func(t *testing.T) {
seq := MakeBy(5, func(i int) int { return i * i })
async := Async(seq, 3)
async := AsyncBuf(seq, 3)
result := toSlice(async)
assert.Equal(t, []int{0, 1, 4, 9, 16}, result)
})
@@ -374,7 +374,7 @@ func BenchmarkAsync(b *testing.B) {
seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
b.ResetTimer()
for range b.N {
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
for range async {
}
}
@@ -388,7 +388,7 @@ func BenchmarkAsync_LargeSequence(b *testing.B) {
seq := From(data...)
b.ResetTimer()
for range b.N {
async := Async(seq, 100)
async := AsyncBuf(seq, 100)
for range async {
}
}
@@ -398,7 +398,7 @@ func BenchmarkAsync_SmallBuffer(b *testing.B) {
seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
b.ResetTimer()
for range b.N {
async := Async(seq, 1)
async := AsyncBuf(seq, 1)
for range async {
}
}
@@ -408,7 +408,7 @@ func BenchmarkAsync_LargeBuffer(b *testing.B) {
seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
b.ResetTimer()
for range b.N {
async := Async(seq, 100)
async := AsyncBuf(seq, 100)
for range async {
}
}
@@ -418,7 +418,7 @@ func BenchmarkAsync_Unbuffered(b *testing.B) {
seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
b.ResetTimer()
for range b.N {
async := Async(seq, 0)
async := AsyncBuf(seq, 0)
for range async {
}
}
@@ -428,7 +428,7 @@ func BenchmarkAsync_WithMap(b *testing.B) {
seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
b.ResetTimer()
for range b.N {
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
mapped := MonadMap(async, N.Mul(2))
for range mapped {
}
@@ -439,7 +439,7 @@ func BenchmarkAsync_WithFilter(b *testing.B) {
seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
b.ResetTimer()
for range b.N {
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
filtered := MonadFilter(async, func(x int) bool { return x%2 == 0 })
for range filtered {
}
@@ -449,7 +449,7 @@ func BenchmarkAsync_WithFilter(b *testing.B) {
// Example tests for documentation
func ExampleAsync() {
seq := From(1, 2, 3, 4, 5)
async := Async(seq, 10)
async := AsyncBuf(seq, 10)
for v := range async {
fmt.Printf("%d ", v)
@@ -459,7 +459,7 @@ func ExampleAsync() {
func ExampleAsync_unbuffered() {
seq := From(1, 2, 3)
async := Async(seq, 0)
async := AsyncBuf(seq, 0)
for v := range async {
fmt.Printf("%d ", v)
@@ -469,7 +469,7 @@ func ExampleAsync_unbuffered() {
func ExampleAsync_earlyTermination() {
seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
count := 0
for v := range async {
@@ -484,7 +484,7 @@ func ExampleAsync_earlyTermination() {
func ExampleAsync_withMap() {
seq := From(1, 2, 3, 4, 5)
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
doubled := MonadMap(async, N.Mul(2))
for v := range doubled {
@@ -495,7 +495,7 @@ func ExampleAsync_withMap() {
func ExampleAsync_withFilter() {
seq := From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
async := Async(seq, 5)
async := AsyncBuf(seq, 5)
evens := MonadFilter(async, func(x int) bool { return x%2 == 0 })
for v := range evens {
@@ -508,7 +508,7 @@ func ExampleAsync_withFilter() {
func TestAsync2_Success(t *testing.T) {
t.Run("converts Seq2 to async with buffer", func(t *testing.T) {
seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
async := Async2(seq, 10)
async := Async2Buf(seq, 10)
result := toMap(async)
expected := map[int]string{1: "a", 2: "b", 3: "c"}
assert.Equal(t, expected, result)
@@ -516,22 +516,22 @@ func TestAsync2_Success(t *testing.T) {
t.Run("preserves key-value pairs order", func(t *testing.T) {
seq := MonadZip(From("x", "y", "z"), From(10, 20, 30))
async := Async2(seq, 5)
async := Async2Buf(seq, 5)
keys := []string{}
values := []int{}
for k, v := range async {
keys = append(keys, k)
values = append(values, v)
}
assert.Equal(t, []string{"x", "y", "z"}, keys)
assert.Equal(t, []int{10, 20, 30}, values)
})
t.Run("works with single pair", func(t *testing.T) {
seq := Of2("key", 42)
async := Async2(seq, 1)
async := Async2Buf(seq, 1)
result := toMap(async)
assert.Equal(t, map[string]int{"key": 42}, result)
})
@@ -544,7 +544,7 @@ func TestAsync2_Success(t *testing.T) {
values[i] = fmt.Sprintf("val%d", i)
}
seq := MonadZip(From(keys...), From(values...))
async := Async2(seq, 20)
async := Async2Buf(seq, 20)
result := toMap(async)
assert.Equal(t, 100, len(result))
for i := range 100 {
@@ -557,7 +557,7 @@ func TestAsync2_Success(t *testing.T) {
func TestAsync2_BufferSizes(t *testing.T) {
t.Run("unbuffered channel (bufSize 0)", func(t *testing.T) {
seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
async := Async2(seq, 0)
async := Async2Buf(seq, 0)
result := toMap(async)
expected := map[int]string{1: "a", 2: "b", 3: "c"}
assert.Equal(t, expected, result)
@@ -565,7 +565,7 @@ func TestAsync2_BufferSizes(t *testing.T) {
t.Run("negative buffer size treated as 0", func(t *testing.T) {
seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
async := Async2(seq, -5)
async := Async2Buf(seq, -5)
result := toMap(async)
expected := map[int]string{1: "a", 2: "b", 3: "c"}
assert.Equal(t, expected, result)
@@ -573,7 +573,7 @@ func TestAsync2_BufferSizes(t *testing.T) {
t.Run("large buffer", func(t *testing.T) {
seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
async := Async2(seq, 100)
async := Async2Buf(seq, 100)
result := toMap(async)
expected := map[int]string{1: "a", 2: "b", 3: "c"}
assert.Equal(t, expected, result)
@@ -584,7 +584,7 @@ func TestAsync2_BufferSizes(t *testing.T) {
func TestAsync2_Empty(t *testing.T) {
t.Run("empty Seq2", func(t *testing.T) {
seq := MonadZip(Empty[int](), Empty[string]())
async := Async2(seq, 5)
async := Async2Buf(seq, 5)
result := toMap(async)
assert.Empty(t, result)
})
@@ -594,8 +594,8 @@ func TestAsync2_Empty(t *testing.T) {
func TestAsync2_EarlyTermination(t *testing.T) {
t.Run("stops producer when consumer breaks", func(t *testing.T) {
seq := MonadZip(From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), From("a", "b", "c", "d", "e", "f", "g", "h", "i", "j"))
async := Async2(seq, 5)
async := Async2Buf(seq, 5)
count := 0
for range async {
count++
@@ -603,7 +603,7 @@ func TestAsync2_EarlyTermination(t *testing.T) {
break
}
}
assert.Equal(t, 3, count)
})
}
@@ -613,7 +613,7 @@ func TestAsync2_WithChainedOperations(t *testing.T) {
t.Run("async2 after map", func(t *testing.T) {
seq := MonadZip(From(1, 2, 3), From(10, 20, 30))
mapped := MonadMapWithKey(seq, func(k, v int) int { return k + v })
async := Async2(mapped, 5)
async := Async2Buf(mapped, 5)
result := toMap(async)
expected := map[int]int{1: 11, 2: 22, 3: 33}
assert.Equal(t, expected, result)
@@ -626,7 +626,7 @@ func TestToSeqPair_Success(t *testing.T) {
seq2 := MonadZip(From(1, 2, 3), From("a", "b", "c"))
pairs := ToSeqPair(seq2)
result := toSlice(pairs)
assert.Equal(t, 3, len(result))
assert.Equal(t, 1, pair.Head(result[0]))
assert.Equal(t, "a", pair.Tail(result[0]))
@@ -640,7 +640,7 @@ func TestToSeqPair_Success(t *testing.T) {
seq2 := MonadZip(From("x", "y", "z"), From(10, 20, 30))
pairs := ToSeqPair(seq2)
result := toSlice(pairs)
assert.Equal(t, 3, len(result))
for i, p := range result {
expectedKey := string(rune('x' + i))
@@ -654,7 +654,7 @@ func TestToSeqPair_Success(t *testing.T) {
seq2 := Of2("key", 42)
pairs := ToSeqPair(seq2)
result := toSlice(pairs)
assert.Equal(t, 1, len(result))
assert.Equal(t, "key", pair.Head(result[0]))
assert.Equal(t, 42, pair.Tail(result[0]))
@@ -685,7 +685,7 @@ func TestToSeqPair_WithComplexTypes(t *testing.T) {
)
pairs := ToSeqPair(seq2)
result := toSlice(pairs)
assert.Equal(t, 3, len(result))
assert.Equal(t, 1, pair.Head(result[0]))
assert.Equal(t, Person{"Alice", 30}, pair.Tail(result[0]))
@@ -702,7 +702,7 @@ func TestFromSeqPair_Success(t *testing.T) {
)
seq2 := FromSeqPair(pairs)
result := toMap(seq2)
expected := map[int]string{1: "a", 2: "b", 3: "c"}
assert.Equal(t, expected, result)
})
@@ -714,14 +714,14 @@ func TestFromSeqPair_Success(t *testing.T) {
pair.MakePair("z", 30),
)
seq2 := FromSeqPair(pairs)
keys := []string{}
values := []int{}
for k, v := range seq2 {
keys = append(keys, k)
values = append(values, v)
}
assert.Equal(t, []string{"x", "y", "z"}, keys)
assert.Equal(t, []int{10, 20, 30}, values)
})
@@ -730,7 +730,7 @@ func TestFromSeqPair_Success(t *testing.T) {
pairs := From(pair.MakePair("key", 42))
seq2 := FromSeqPair(pairs)
result := toMap(seq2)
assert.Equal(t, map[string]int{"key": 42}, result)
})
}
@@ -760,7 +760,7 @@ func TestFromSeqPair_WithComplexTypes(t *testing.T) {
)
seq2 := FromSeqPair(pairs)
result := toMap(seq2)
expected := map[int]Person{
1: {"Alice", 30},
2: {"Bob", 25},
@@ -777,7 +777,7 @@ func TestRoundTrip(t *testing.T) {
pairs := ToSeqPair(original)
restored := FromSeqPair(pairs)
result := toMap(restored)
expected := map[int]string{1: "a", 2: "b", 3: "c"}
assert.Equal(t, expected, result)
})
@@ -791,7 +791,7 @@ func TestRoundTrip(t *testing.T) {
seq2 := FromSeqPair(original)
restored := ToSeqPair(seq2)
result := toSlice(restored)
assert.Equal(t, 3, len(result))
assert.Equal(t, 1, pair.Head(result[0]))
assert.Equal(t, "a", pair.Tail(result[0]))
@@ -803,7 +803,7 @@ func BenchmarkAsync2(b *testing.B) {
seq := MonadZip(From(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), From("a", "b", "c", "d", "e", "f", "g", "h", "i", "j"))
b.ResetTimer()
for range b.N {
async := Async2(seq, 5)
async := Async2Buf(seq, 5)
for range async {
}
}
@@ -819,7 +819,7 @@ func BenchmarkAsync2_LargeSequence(b *testing.B) {
seq := MonadZip(From(keys...), From(values...))
b.ResetTimer()
for range b.N {
async := Async2(seq, 100)
async := Async2Buf(seq, 100)
for range async {
}
}
@@ -856,7 +856,7 @@ func BenchmarkRoundTrip(b *testing.B) {
// Example tests for Async2
func ExampleAsync2() {
seq := MonadZip(From(1, 2, 3), From("a", "b", "c"))
async := Async2(seq, 10)
async := Async2Buf(seq, 10)
for k, v := range async {
fmt.Printf("%d: %s\n", k, v)
@@ -869,7 +869,7 @@ func ExampleAsync2() {
func ExampleAsync2_earlyTermination() {
seq := MonadZip(From(1, 2, 3, 4, 5), From("a", "b", "c", "d", "e"))
async := Async2(seq, 5)
async := Async2Buf(seq, 5)
count := 0
for k, v := range async {
@@ -901,5 +901,3 @@ func ExampleFromSeqPair() {
// 2: b
// 3: c
}
+25
View File
@@ -495,6 +495,26 @@ func FlatMap[A, B any](f func(A) Seq[B]) Operator[A, B] {
return Chain(f)
}
// ConcatMap maps every element of the input sequence to a sequence and
// concatenates the resulting sequences in order. It is an alias for Chain
// (via FlatMap) that emphasizes sequential concatenation: every element
// produced by f(a) is yielded before the next input element is processed,
// so the output order is fully deterministic.
//
// Example:
//
//	seq := From(1, 2, 3)
//	result := ConcatMap(func(x int) Seq[int] {
//		return From(x, x*10)
//	})(seq)
//	// yields: 1, 10, 2, 20, 3, 30 (order preserved)
//
//go:inline
func ConcatMap[A, B any](f func(A) Seq[B]) Operator[A, B] {
	return FlatMap(f)
}
// Flatten flattens a sequence of sequences into a single sequence.
//
// Marble Diagram:
@@ -516,6 +536,11 @@ func Flatten[A any](mma Seq[Seq[A]]) Seq[A] {
return MonadChain(mma, F.Identity[Seq[A]])
}
// ConcatAll flattens a sequence of sequences into a single sequence,
// concatenating the inner sequences in order. It is an alias for Flatten
// whose name emphasizes sequential, deterministic concatenation (in
// contrast to the concurrent MergeAll).
//
//go:inline
func ConcatAll[A any](mma Seq[Seq[A]]) Seq[A] {
	return Flatten(mma)
}
// MonadAp applies a sequence of functions to a sequence of values.
// This is the applicative apply operation.
//
+563
View File
@@ -0,0 +1,563 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package iter
import (
"slices"
"sync"
A "github.com/IBM/fp-go/v2/array"
F "github.com/IBM/fp-go/v2/function"
M "github.com/IBM/fp-go/v2/monoid"
N "github.com/IBM/fp-go/v2/number"
)
const (
	// defaultBufferSize is the channel buffer size used by the convenience
	// wrappers Merge and MergeMap when no explicit buffer size is supplied.
	defaultBufferSize = 8
)
// MergeBuf merges multiple sequences concurrently into a single sequence.
// It spawns a goroutine for each input sequence and merges their elements through
// a buffered channel, allowing concurrent production from all sources. The output
// order is non-deterministic and depends on the timing of concurrent producers.
//
// This function is useful for combining results from multiple concurrent operations,
// processing data from multiple sources in parallel, or implementing fan-in patterns
// where multiple producers feed into a single consumer.
//
// Type Parameters:
// - T: The type of elements in the sequences
//
// Parameters:
// - iterables: A slice of sequences to merge. If empty, returns an empty sequence.
// - bufSize: The buffer size for the internal channel. Negative values are treated as 0 (unbuffered).
// A larger buffer allows more elements to be produced ahead of consumption,
// reducing contention between producers but using more memory.
// A buffer of 0 creates an unbuffered channel requiring synchronization.
//
// Returns:
// - Seq[T]: A new sequence that yields elements from all input sequences in non-deterministic order
//
// Behavior:
// - Spawns one goroutine per input sequence to produce elements concurrently
// - Elements from different sequences are interleaved non-deterministically
// - Properly handles early termination: if the consumer stops iterating (yield returns false),
// all producer goroutines are signaled to stop and cleaned up
// - The output channel is closed when all input sequences are exhausted
// - No goroutines leak even with early termination
// - Thread-safe: multiple producers can safely send to the shared channel
//
// Example Usage:
//
//	// Merge three sequences concurrently using MergeBuf
// seq1 := From(1, 2, 3)
// seq2 := From(4, 5, 6)
// seq3 := From(7, 8, 9)
// merged := MergeBuf([]Seq[int]{seq1, seq2, seq3}, 10)
//
// // Elements appear in non-deterministic order
// for v := range merged {
// fmt.Println(v) // May print: 1, 4, 7, 2, 5, 8, 3, 6, 9 (order varies)
// }
//
// Example with Early Termination:
//
// seq1 := From(1, 2, 3, 4, 5)
// seq2 := From(6, 7, 8, 9, 10)
// merged := MergeBuf([]Seq[int]{seq1, seq2}, 5)
//
// // Stop after 3 elements - all producer goroutines will be properly cleaned up
// count := 0
// for v := range merged {
// fmt.Println(v)
// count++
// if count >= 3 {
// break
// }
// }
//
// Example with Unbuffered Channel:
//
// // bufSize of 0 creates an unbuffered channel
// seq1 := From(1, 2, 3)
// seq2 := From(4, 5, 6)
// merged := MergeBuf([]Seq[int]{seq1, seq2}, 0)
//
// // Producers and consumer are synchronized
// for v := range merged {
// fmt.Println(v)
// }
//
// See Also:
// - Async: Converts a single sequence to asynchronous
// - From: Creates a sequence from values
// - MonadChain: Sequentially chains sequences (deterministic order)
func MergeBuf[T any](iterables []Seq[T], bufSize int) Seq[T] {
	// View the slice as a sequence of sequences and merge them concurrently.
	return MergeAll[T](bufSize)(slices.Values(iterables))
}
// Merge merges multiple sequences concurrently into a single sequence using a default buffer size.
// This is a convenience wrapper around MergeBuf that uses a default buffer size of 8.
//
// Type Parameters:
// - T: The type of elements in the sequences
//
// Parameters:
// - iterables: A slice of sequences to merge. If empty, returns an empty sequence.
//
// Returns:
// - Seq[T]: A new sequence that yields elements from all input sequences in non-deterministic order
//
// Behavior:
// - Uses a default buffer size of 8 for the internal channel
// - Spawns one goroutine per input sequence to produce elements concurrently
// - Elements from different sequences are interleaved non-deterministically
// - Properly handles early termination with goroutine cleanup
// - Thread-safe: multiple producers can safely send to the shared channel
//
// Example:
//
// seq1 := From(1, 2, 3)
// seq2 := From(4, 5, 6)
// seq3 := From(7, 8, 9)
// merged := Merge([]Seq[int]{seq1, seq2, seq3})
//
// // Elements appear in non-deterministic order
// for v := range merged {
// fmt.Println(v) // May print: 1, 4, 7, 2, 5, 8, 3, 6, 9 (order varies)
// }
//
// See Also:
// - MergeBuf: Merge with custom buffer size
// - MergeAll: Merges a sequence of sequences
// - Async: Converts a single sequence to asynchronous
func Merge[T any](iterables []Seq[T]) Seq[T] {
	// Equivalent to MergeBuf(iterables, defaultBufferSize), spelled out as
	// the underlying merge of the slice viewed as a sequence of sequences.
	return MergeAll[T](defaultBufferSize)(slices.Values(iterables))
}
// MergeMonoid creates a Monoid for merging sequences concurrently.
// The monoid combines two sequences by merging them concurrently with the specified
// buffer size, and uses an empty sequence as the identity element.
//
// A Monoid is an algebraic structure with an associative binary operation (concat)
// and an identity element (empty). For sequences, the concat operation merges two
// sequences concurrently, and the identity is an empty sequence.
//
// This is useful for functional composition patterns where you need to combine
// multiple sequences using monoid operations like Reduce, FoldMap, or when working
// with monadic operations that require a monoid instance.
//
// Marble Diagram (Concurrent Merging):
//
// Seq1: --1--2--3--|
// Seq2: --4--5--6--|
// Merge: --1-4-2-5-3-6--|
// (non-deterministic order)
//
// Marble Diagram (vs ConcatMonoid):
//
// MergeMonoid (concurrent):
// Seq1: --1--2--3--|
// Seq2: --4--5--6--|
// Result: --1-4-2-5-3-6--|
// (elements interleaved)
//
// ConcatMonoid (sequential):
// Seq1: --1--2--3--|
// Seq2: --4--5--6--|
// Result: --1--2--3--4--5--6--|
// (deterministic order)
//
// Type Parameters:
// - T: The type of elements in the sequences
//
// Parameters:
// - bufSize: The buffer size for the internal channel used during merging.
// This buffer size will be used for all merge operations performed by the monoid.
// Negative values are treated as 0 (unbuffered).
//
// Returns:
// - Monoid[Seq[T]]: A monoid instance with:
// - Concat: Merges two sequences concurrently using Merge
// - Empty: Returns an empty sequence
//
// Properties:
// - Identity: concat(empty, x) = concat(x, empty) = x
// - Associativity: concat(concat(a, b), c) = concat(a, concat(b, c))
// Note: Due to concurrent execution, element order may vary between equivalent expressions
//
// Example Usage:
//
// // Create a monoid for merging integer sequences
// monoid := MergeMonoid[int](10)
//
// // Use with Reduce to merge multiple sequences
// sequences := []Seq[int]{
// From(1, 2, 3),
// From(4, 5, 6),
// From(7, 8, 9),
// }
//	merged := MonadReduce(From(sequences...), monoid.Concat, monoid.Empty())
// // merged contains all elements from all sequences (order non-deterministic)
//
// Example with Empty Identity:
//
// monoid := MergeMonoid[int](5)
// seq := From(1, 2, 3)
//
// // Merging with empty is identity
//	result1 := monoid.Concat(monoid.Empty(), seq) // same as seq
//	result2 := monoid.Concat(seq, monoid.Empty()) // same as seq
//
// Example with FoldMap:
//
// // Convert each number to a sequence and merge all results
// monoid := MergeMonoid[int](10)
// numbers := From(1, 2, 3)
// result := MonadFoldMap(numbers, func(n int) Seq[int] {
// return From(n, n*10, n*100)
// }, monoid)
// // result contains: 1, 10, 100, 2, 20, 200, 3, 30, 300 (order varies)
//
// See Also:
// - Merge: The underlying merge function
// - MergeAll: Merges multiple sequences at once
// - Empty: Creates an empty sequence
func MergeMonoid[T any](bufSize int) M.Monoid[Seq[T]] {
	// The concat operation merges exactly two sequences concurrently,
	// reusing the configured buffer size for every combination.
	concat := func(first, second Seq[T]) Seq[T] {
		return MergeBuf([]Seq[T]{first, second}, bufSize)
	}
	return M.MakeMonoid(concat, Empty[T]())
}
// MergeAll creates an operator that flattens and merges a sequence of sequences concurrently.
// It takes a sequence of sequences (Seq[Seq[T]]) and produces a single flat sequence (Seq[T])
// by spawning a goroutine for each inner sequence as it arrives, merging all their elements
// through a buffered channel. This enables dynamic concurrent processing where inner sequences
// can be produced and consumed concurrently.
//
// Unlike Merge which takes a pre-defined slice of sequences, MergeAll processes sequences
// dynamically as they are produced by the outer sequence. This makes it ideal for scenarios
// where the number of sequences isn't known upfront or where sequences are generated on-the-fly.
//
// Type Parameters:
// - T: The type of elements in the inner sequences
//
// Parameters:
// - bufSize: The buffer size for the internal channel. Negative values are treated as 0 (unbuffered).
// A larger buffer allows more elements to be produced ahead of consumption,
// reducing contention between producers but using more memory.
//
// Returns:
// - Operator[Seq[T], T]: A function that takes a sequence of sequences and returns a flat sequence
//
// Behavior:
// - Spawns one goroutine for the outer sequence to iterate and spawn inner producers
// - Spawns one goroutine per inner sequence as it arrives from the outer sequence
// - Elements from different inner sequences are interleaved non-deterministically
// - Properly handles early termination: if the consumer stops iterating, all goroutines are cleaned up
// - The output channel is closed when both the outer sequence and all inner sequences are exhausted
// - No goroutines leak even with early termination
// - Thread-safe: multiple producers can safely send to the shared channel
//
// Example Usage:
//
// // Create a sequence of sequences dynamically
// outer := From(
// From(1, 2, 3),
// From(4, 5, 6),
// From(7, 8, 9),
// )
// mergeAll := MergeAll[int](10)
// merged := mergeAll(outer)
//
// // Elements appear in non-deterministic order
// for v := range merged {
// fmt.Println(v) // May print: 1, 4, 7, 2, 5, 8, 3, 6, 9 (order varies)
// }
//
// Example with Dynamic Generation:
//
// // Generate sequences on-the-fly
// outer := Map(func(n int) Seq[int] {
// return From(n, n*10, n*100)
// })(From(1, 2, 3))
// mergeAll := MergeAll[int](10)
// merged := mergeAll(outer)
//
// // Yields: 1, 10, 100, 2, 20, 200, 3, 30, 300 (order varies)
// for v := range merged {
// fmt.Println(v)
// }
//
// Example with Early Termination:
//
// outer := From(
// From(1, 2, 3, 4, 5),
// From(6, 7, 8, 9, 10),
// From(11, 12, 13, 14, 15),
// )
// mergeAll := MergeAll[int](5)
// merged := mergeAll(outer)
//
// // Stop after 5 elements - all goroutines will be properly cleaned up
// count := 0
// for v := range merged {
// fmt.Println(v)
// count++
// if count >= 5 {
// break
// }
// }
//
// Example with MergeMap:
//
//	// MergeMapBuf combines Map and MergeAll into a single operation
//	numbers := From(1, 2, 3)
//	result := MergeMapBuf(func(n int) Seq[int] {
//		return From(n, n*10)
//	}, 0)(numbers)
//	// This is equivalent to: MergeAll[int](0)(Map(...)(numbers))
//
// See Also:
// - Merge: Merges a pre-defined slice of sequences
// - Chain: Sequentially flattens sequences (deterministic order)
// - Flatten: Flattens nested sequences sequentially
// - Async: Converts a single sequence to asynchronous
func MergeAll[T any](bufSize int) Operator[Seq[T], T] {
	// Clamp negative buffer sizes to 0 (an unbuffered channel).
	buf := N.Max(bufSize, 0)
	return func(s Seq[Seq[T]]) Seq[T] {
		return func(yield func(T) bool) {
			// ch funnels values from all producers to the single consumer.
			ch := make(chan T, buf)
			// done signals cancellation to every producer when the consumer
			// stops early.
			done := make(chan Void)
			var wg sync.WaitGroup
			// Outer producer: iterates the outer Seq and spawns an inner
			// goroutine for each inner Seq it emits.
			wg.Add(1)
			go func() {
				defer wg.Done()
				s(func(inner Seq[T]) bool {
					// Stop spawning new producers once cancellation fired.
					select {
					case <-done:
						return false
					default:
					}
					wg.Add(1)
					go func(seq Seq[T]) {
						defer wg.Done()
						seq(func(v T) bool {
							// Either deliver the value or give up on
							// cancellation, so a producer never blocks
							// forever on a full channel.
							select {
							case ch <- v:
								return true
							case <-done:
								return false
							}
						})
					}(inner)
					return true
				})
			}()
			// Close ch once the outer producer and all inner producers finish.
			go func() {
				wg.Wait()
				close(ch)
			}()
			// On exit, signal cancellation and drain so no producer blocks
			// forever on `ch <- v`.
			defer func() {
				close(done)
				for range ch {
				}
			}()
			// Forward merged values to the consumer until it stops.
			for v := range ch {
				if !yield(v) {
					return
				}
			}
		}
	}
}
// MergeMapBuf applies a function that returns a sequence to each element and merges the results concurrently.
// This is the concurrent version of Chain (flatMap), where each mapped sequence is processed in parallel
// rather than sequentially. It combines Map and MergeAll into a single operation.
//
// Unlike Chain which processes sequences sequentially (deterministic order), MergeMapBuf spawns a goroutine
// for each mapped sequence and merges their elements concurrently through a buffered channel. This makes
// it ideal for I/O-bound operations, parallel data processing, or when the order of results doesn't matter.
//
// Type Parameters:
// - A: The type of elements in the input sequence
// - B: The type of elements in the output sequences
//
// Parameters:
// - f: A function that transforms each input element into a sequence of output elements
// - bufSize: The buffer size for the internal channel. Negative values are treated as 0 (unbuffered).
// A larger buffer allows more elements to be produced ahead of consumption,
// reducing contention between producers but using more memory.
//
// Returns:
// - Operator[A, B]: A function that takes a sequence of A and returns a flat sequence of B
//
// Behavior:
// - Applies f to each element in the input sequence to produce inner sequences
// - Spawns one goroutine per inner sequence to produce elements concurrently
// - Elements from different inner sequences are interleaved non-deterministically
// - Properly handles early termination: if the consumer stops iterating, all goroutines are cleaned up
// - No goroutines leak even with early termination
// - Thread-safe: multiple producers can safely send to the shared channel
//
// Comparison with Chain:
// - Chain: Sequential processing, deterministic order, no concurrency overhead
// - MergeMapBuf: Concurrent processing, non-deterministic order, better for I/O-bound tasks
//
// Example Usage:
//
// // Expand each number into a sequence concurrently
// expand := MergeMapBuf(func(n int) Seq[int] {
// return From(n, n*10, n*100)
// }, 10)
// seq := From(1, 2, 3)
// result := expand(seq)
//
// // Yields: 1, 10, 100, 2, 20, 200, 3, 30, 300 (order varies)
// for v := range result {
// fmt.Println(v)
// }
//
// Example with I/O Operations:
//
// // Fetch data concurrently for each ID
// fetchData := MergeMapBuf(func(id int) Seq[string] {
// // Simulate I/O operation
// data := fetchFromAPI(id)
// return From(data...)
// }, 20)
// ids := From(1, 2, 3, 4, 5)
// results := fetchData(ids)
//
// // All fetches happen concurrently
// for data := range results {
// fmt.Println(data)
// }
//
// Example with Early Termination:
//
// expand := MergeMapBuf(func(n int) Seq[int] {
// return From(n, n*10, n*100)
// }, 5)
// seq := From(1, 2, 3, 4, 5)
// result := expand(seq)
//
// // Stop after 5 elements - all goroutines will be properly cleaned up
// count := 0
// for v := range result {
// fmt.Println(v)
// count++
// if count >= 5 {
// break
// }
// }
//
// Example with Unbuffered Channel:
//
// // bufSize of 0 creates an unbuffered channel
// expand := MergeMapBuf(func(n int) Seq[int] {
// return From(n, n*2)
// }, 0)
// seq := From(1, 2, 3)
// result := expand(seq)
//
// // Producers and consumer are synchronized
// for v := range result {
// fmt.Println(v)
// }
//
// See Also:
// - Chain: Sequential version (deterministic order)
// - MergeAll: Merges pre-existing sequences concurrently
// - Map: Transforms elements without flattening
// - Async: Converts a single sequence to asynchronous
func MergeMapBuf[A, B any](f func(A) Seq[B], bufSize int) Operator[A, B] {
	// Map each element to an inner sequence, then merge all inner
	// sequences concurrently — the concurrent counterpart of Chain.
	mapper := Map(f)
	merger := MergeAll[B](bufSize)
	return func(as Seq[A]) Seq[B] {
		return merger(mapper(as))
	}
}
// MergeMap applies a function that returns a sequence to each element and merges the results concurrently using a default buffer size.
// This is a convenience wrapper around MergeMapBuf that uses a default buffer size of 8.
// It's the concurrent version of Chain (flatMap), where each mapped sequence is processed in parallel.
//
// Type Parameters:
// - A: The type of elements in the input sequence
// - B: The type of elements in the output sequences
//
// Parameters:
// - f: A function that transforms each input element into a sequence of output elements
//
// Returns:
// - Operator[A, B]: A function that takes a sequence of A and returns a flat sequence of B
//
// Behavior:
// - Uses a default buffer size of 8 for the internal channel
// - Applies f to each element in the input sequence to produce inner sequences
// - Spawns one goroutine per inner sequence to produce elements concurrently
// - Elements from different inner sequences are interleaved non-deterministically
// - Properly handles early termination with goroutine cleanup
// - Thread-safe: multiple producers can safely send to the shared channel
//
// Comparison with Chain:
// - Chain: Sequential processing, deterministic order, no concurrency overhead
// - MergeMap: Concurrent processing, non-deterministic order, better for I/O-bound tasks
//
// Example:
//
// // Expand each number into a sequence concurrently
// expand := MergeMap(func(n int) Seq[int] {
// return From(n, n*10, n*100)
// })
// seq := From(1, 2, 3)
// result := expand(seq)
//
// // Yields: 1, 10, 100, 2, 20, 200, 3, 30, 300 (order varies)
// for v := range result {
// fmt.Println(v)
// }
//
// See Also:
// - MergeMapBuf: MergeMap with custom buffer size
// - Chain: Sequential version (deterministic order)
// - MergeAll: Merges pre-existing sequences concurrently
// - Map: Transforms elements without flattening
func MergeMap[A, B any](f func(A) Seq[B]) Operator[A, B] {
	// Equivalent to MergeMapBuf(f, defaultBufferSize).
	return F.Flow2(Map(f), MergeAll[B](defaultBufferSize))
}
File diff suppressed because it is too large Load Diff
+134 -1
View File
@@ -21,7 +21,13 @@ import (
)
// Monoid returns a Monoid instance for Seq[T].
// The monoid's concat operation concatenates sequences, and the empty value is an empty sequence.
// The monoid's concat operation concatenates sequences sequentially, and the empty value is an empty sequence.
//
// Marble Diagram:
//
// Seq1: --1--2--3--|
// Seq2: --4--5--6--|
// Concat: --1--2--3--4--5--6--|
//
// Example:
//
@@ -35,3 +41,130 @@ import (
func Monoid[T any]() M.Monoid[Seq[T]] {
return G.Monoid[Seq[T]]()
}
// ConcatMonoid returns a Monoid instance for Seq[T] that concatenates sequences sequentially.
// This is an alias for Monoid that makes the sequential concatenation behavior explicit.
//
// A Monoid is an algebraic structure with an associative binary operation (concat)
// and an identity element (empty). For sequences, the concat operation appends one
// sequence after another in deterministic order, and the identity is an empty sequence.
//
// This monoid is useful for functional composition patterns where you need to combine
// multiple sequences sequentially using monoid operations like Reduce, FoldMap, or when
// working with monadic operations that require a monoid instance.
//
// Marble Diagram (Sequential Concatenation):
//
// Seq1: --1--2--3--|
// Seq2: --4--5--6--|
// Concat: --1--2--3--4--5--6--|
// (deterministic order)
//
// Marble Diagram (vs MergeMonoid):
//
// ConcatMonoid:
// Seq1: --1--2--3--|
// Seq2: --4--5--6--|
// Result: --1--2--3--4--5--6--|
//
// MergeMonoid:
// Seq1: --1--2--3--|
// Seq2: --4--5--6--|
// Result: --1-4-2-5-3-6--|
// (non-deterministic)
//
// Type Parameters:
// - T: The type of elements in the sequences
//
// Returns:
// - Monoid[Seq[T]]: A monoid instance with:
// - Concat: Appends sequences sequentially (deterministic order)
// - Empty: Returns an empty sequence
//
// Properties:
// - Identity: concat(empty, x) = concat(x, empty) = x
// - Associativity: concat(concat(a, b), c) = concat(a, concat(b, c))
// - Deterministic: Elements always appear in the order of the input sequences
//
// Comparison with MergeMonoid:
//
// ConcatMonoid and MergeMonoid serve different purposes:
//
// - ConcatMonoid: Sequential concatenation
//
// - Order: Deterministic - elements from first sequence, then second, etc.
//
// - Concurrency: No concurrency - sequences are processed one after another
//
// - Performance: Lower overhead, no goroutines or channels
//
// - Use when: Order matters, no I/O operations, or simplicity is preferred
//
// - MergeMonoid: Concurrent merging
//
// - Order: Non-deterministic - elements interleaved based on timing
//
// - Concurrency: Spawns goroutines for each sequence
//
// - Performance: Better for I/O-bound operations, higher overhead for CPU-bound
//
// - Use when: Order doesn't matter, parallel I/O, or concurrent processing needed
//
// Example Usage:
//
//	// Create a monoid for concatenating integer sequences
//	monoid := ConcatMonoid[int]()
//
//	// Use with Reduce to concatenate multiple sequences
//	sequences := []Seq[int]{
//		From(1, 2, 3),
//		From(4, 5, 6),
//		From(7, 8, 9),
//	}
//	concatenated := MonadReduce(From(sequences...), monoid.Concat, monoid.Empty())
//	// yields: 1, 2, 3, 4, 5, 6, 7, 8, 9 (deterministic order)
//
// Example with Empty Identity:
//
//	monoid := ConcatMonoid[int]()
//	seq := From(1, 2, 3)
//
//	// Concatenating with empty is identity
//	result1 := monoid.Concat(monoid.Empty(), seq) // same as seq
//	result2 := monoid.Concat(seq, monoid.Empty()) // same as seq
//
// Example with FoldMap:
//
//	// Convert each number to a sequence and concatenate all results
//	monoid := ConcatMonoid[int]()
//	numbers := From(1, 2, 3)
//	result := MonadFoldMap(numbers, func(n int) Seq[int] {
//		return From(n, n*10, n*100)
//	}, monoid)
//	// yields: 1, 10, 100, 2, 20, 200, 3, 30, 300 (deterministic order)
//
// Example Comparing ConcatMonoid vs MergeMonoid:
//
//	seq1 := From(1, 2, 3)
//	seq2 := From(4, 5, 6)
//
//	// ConcatMonoid: Sequential, deterministic
//	concatMonoid := ConcatMonoid[int]()
//	concat := concatMonoid.Concat(seq1, seq2)
//	// Always yields: 1, 2, 3, 4, 5, 6
//
//	// MergeMonoid: Concurrent, non-deterministic
//	mergeMonoid := MergeMonoid[int](10)
//	merged := mergeMonoid.Concat(seq1, seq2)
//	// May yield: 1, 4, 2, 5, 3, 6 (order varies)
//
// See Also:
//   - Monoid: The base monoid function (alias)
//   - MergeMonoid: Concurrent merging monoid
//   - MonadChain: Sequential flattening of sequences
//   - Empty: Creates an empty sequence
//
//go:inline
func ConcatMonoid[T any]() M.Monoid[Seq[T]] {
	return Monoid[T]()
}
+363
View File
@@ -0,0 +1,363 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package iter
import (
"fmt"
"slices"
"testing"
"github.com/stretchr/testify/assert"
)
func TestConcatMonoid_Identity(t *testing.T) {
	t.Run("left identity", func(t *testing.T) {
		m := ConcatMonoid[int]()
		input := From(1, 2, 3)
		got := slices.Collect(m.Concat(m.Empty(), input))
		assert.Equal(t, []int{1, 2, 3}, got)
	})

	t.Run("right identity", func(t *testing.T) {
		m := ConcatMonoid[int]()
		input := From(1, 2, 3)
		got := slices.Collect(m.Concat(input, m.Empty()))
		assert.Equal(t, []int{1, 2, 3}, got)
	})
}
func TestConcatMonoid_Associativity(t *testing.T) {
	m := ConcatMonoid[int]()
	a := From(1, 2)
	b := From(3, 4)
	c := From(5, 6)

	// (a + b) + c must equal a + (b + c).
	lhs := slices.Collect(m.Concat(m.Concat(a, b), c))
	rhs := slices.Collect(m.Concat(a, m.Concat(b, c)))

	assert.Equal(t, lhs, rhs)
	assert.Equal(t, []int{1, 2, 3, 4, 5, 6}, lhs)
}
func TestConcatMonoid_DeterministicOrder(t *testing.T) {
	t.Run("concatenates in deterministic order", func(t *testing.T) {
		m := ConcatMonoid[int]()
		combined := m.Concat(m.Concat(From(1, 2, 3), From(4, 5, 6)), From(7, 8, 9))
		// Order is always deterministic.
		assert.Equal(t, []int{1, 2, 3, 4, 5, 6, 7, 8, 9}, slices.Collect(combined))
	})

	t.Run("multiple runs produce same order", func(t *testing.T) {
		m := ConcatMonoid[int]()
		first := From(1, 2, 3)
		second := From(4, 5, 6)
		want := []int{1, 2, 3, 4, 5, 6}
		// Repeated evaluation must always yield the identical order.
		for run := range 5 {
			got := slices.Collect(m.Concat(first, second))
			assert.Equal(t, want, got, "run %d should match", run)
		}
	})
}
func TestConcatMonoid_WithReduce(t *testing.T) {
	m := ConcatMonoid[int]()
	seqs := From(
		From(1, 2, 3),
		From(4, 5, 6),
		From(7, 8, 9),
	)
	got := slices.Collect(MonadReduce(seqs, m.Concat, m.Empty()))
	assert.Equal(t, []int{1, 2, 3, 4, 5, 6, 7, 8, 9}, got)
}
func TestConcatMonoid_WithFoldMap(t *testing.T) {
	m := ConcatMonoid[int]()
	expand := func(n int) Seq[int] {
		return From(n, n*10, n*100)
	}
	got := slices.Collect(MonadFoldMap(From(1, 2, 3), expand, m))
	// Deterministic order: each number's expansion appears in sequence.
	assert.Equal(t, []int{1, 10, 100, 2, 20, 200, 3, 30, 300}, got)
}
func TestConcatMonoid_ComparisonWithMergeMonoid(t *testing.T) {
	t.Run("ConcatMonoid is deterministic", func(t *testing.T) {
		m := ConcatMonoid[int]()
		got := slices.Collect(m.Concat(From(1, 2, 3), From(4, 5, 6)))
		// Always the same order.
		assert.Equal(t, []int{1, 2, 3, 4, 5, 6}, got)
	})

	t.Run("MergeMonoid may be non-deterministic", func(t *testing.T) {
		m := MergeMonoid[int](10)
		got := slices.Collect(m.Concat(From(1, 2, 3), From(4, 5, 6)))
		// All elements are present, but the interleaving depends on timing,
		// so only set equality can be asserted.
		assert.ElementsMatch(t, []int{1, 2, 3, 4, 5, 6}, got)
	})
}
func TestConcatMonoid_EmptySequences(t *testing.T) {
	t.Run("concatenating empty sequences", func(t *testing.T) {
		m := ConcatMonoid[int]()
		got := slices.Collect(m.Concat(Empty[int](), Empty[int]()))
		assert.Empty(t, got)
	})

	t.Run("concatenating with empty in middle", func(t *testing.T) {
		m := ConcatMonoid[int]()
		combined := m.Concat(m.Concat(From(1, 2), Empty[int]()), From(3, 4))
		assert.Equal(t, []int{1, 2, 3, 4}, slices.Collect(combined))
	})
}
func TestConcatMonoid_WithComplexTypes(t *testing.T) {
	type Person struct {
		Name string
		Age  int
	}

	m := ConcatMonoid[Person]()
	first := From(Person{"Alice", 30}, Person{"Bob", 25})
	second := From(Person{"Charlie", 35}, Person{"Diana", 28})

	got := slices.Collect(m.Concat(first, second))
	assert.Equal(t, []Person{
		{"Alice", 30},
		{"Bob", 25},
		{"Charlie", 35},
		{"Diana", 28},
	}, got)
}
func BenchmarkConcatMonoid_TwoSequences(b *testing.B) {
	m := ConcatMonoid[int]()
	left := From(1, 2, 3, 4, 5)
	right := From(6, 7, 8, 9, 10)
	b.ResetTimer()
	for range b.N {
		// Drain the concatenated sequence to measure full iteration cost.
		for range m.Concat(left, right) {
		}
	}
}
func BenchmarkConcatMonoid_Reduce(b *testing.B) {
	m := ConcatMonoid[int]()
	seqs := From(
		From(1, 2, 3),
		From(4, 5, 6),
		From(7, 8, 9),
		From(10, 11, 12),
	)
	b.ResetTimer()
	for range b.N {
		// Drain the reduced sequence to measure full iteration cost.
		for range MonadReduce(seqs, m.Concat, m.Empty()) {
		}
	}
}
// BenchmarkConcatMonoid_VsMergeMonoid compares sequential concatenation
// (ConcatMonoid) against concurrent merging (MergeMonoid) on identical input,
// as two sub-benchmarks so the results can be compared side by side.
func BenchmarkConcatMonoid_VsMergeMonoid(b *testing.B) {
	seq1 := From(1, 2, 3, 4, 5)
	seq2 := From(6, 7, 8, 9, 10)
	b.Run("ConcatMonoid", func(b *testing.B) {
		monoid := ConcatMonoid[int]()
		b.ResetTimer()
		for range b.N {
			result := monoid.Concat(seq1, seq2)
			for range result {
			}
		}
	})
	b.Run("MergeMonoid", func(b *testing.B) {
		// NOTE(review): the argument 10 is presumably a buffer/capacity hint
		// for the concurrent merge — confirm against the MergeMonoid docs.
		monoid := MergeMonoid[int](10)
		b.ResetTimer()
		for range b.N {
			result := monoid.Concat(seq1, seq2)
			for range result {
			}
		}
	})
}
// ExampleConcatMonoid demonstrates basic sequential concatenation of two
// sequences; the Output comment below doubles as the test's expected output.
func ExampleConcatMonoid() {
	monoid := ConcatMonoid[int]()
	seq1 := From(1, 2, 3)
	seq2 := From(4, 5, 6)
	result := monoid.Concat(seq1, seq2)
	for v := range result {
		fmt.Println(v)
	}
	// Output:
	// 1
	// 2
	// 3
	// 4
	// 5
	// 6
}

// ExampleConcatMonoid_identity shows the left and right identity laws:
// concatenating with Empty on either side leaves the sequence unchanged.
func ExampleConcatMonoid_identity() {
	monoid := ConcatMonoid[int]()
	seq := From(1, 2, 3)
	// Left identity
	result1 := monoid.Concat(monoid.Empty(), seq)
	for v := range result1 {
		fmt.Println(v)
	}
	// Right identity
	result2 := monoid.Concat(seq, monoid.Empty())
	for v := range result2 {
		fmt.Println(v)
	}
	// Output:
	// 1
	// 2
	// 3
	// 1
	// 2
	// 3
}

// ExampleConcatMonoid_reduce folds several sequences into a single flat
// sequence using MonadReduce with the monoid's Concat and Empty.
func ExampleConcatMonoid_reduce() {
	monoid := ConcatMonoid[int]()
	sequences := []Seq[int]{
		From(1, 2, 3),
		From(4, 5, 6),
		From(7, 8, 9),
	}
	result := MonadReduce(From(sequences...), monoid.Concat, monoid.Empty())
	for v := range result {
		fmt.Println(v)
	}
	// Output:
	// 1
	// 2
	// 3
	// 4
	// 5
	// 6
	// 7
	// 8
	// 9
}

// ExampleConcatMonoid_comparison contrasts deterministic ConcatMonoid output
// with MergeMonoid, whose element order is non-deterministic; the merged
// result is sorted so the example output stays stable.
func ExampleConcatMonoid_comparison() {
	seq1 := From(1, 2, 3)
	seq2 := From(4, 5, 6)
	// ConcatMonoid: Sequential, deterministic
	concatMonoid := ConcatMonoid[int]()
	concat := concatMonoid.Concat(seq1, seq2)
	fmt.Println("ConcatMonoid (always same order):")
	for v := range concat {
		fmt.Println(v)
	}
	// MergeMonoid: Concurrent, non-deterministic
	// Note: Output order may vary in actual runs
	mergeMonoid := MergeMonoid[int](10)
	merged := mergeMonoid.Concat(seq1, seq2)
	fmt.Println("\nMergeMonoid (order may vary):")
	collected := slices.Collect(merged)
	// Sort for consistent test output
	slices.Sort(collected)
	for _, v := range collected {
		fmt.Println(v)
	}
	// Output:
	// ConcatMonoid (always same order):
	// 1
	// 2
	// 3
	// 4
	// 5
	// 6
	//
	// MergeMonoid (order may vary):
	// 1
	// 2
	// 3
	// 4
	// 5
	// 6
}
// Made with Bob
+23
View File
@@ -0,0 +1,23 @@
package generic

import (
	F "github.com/IBM/fp-go/v2/function"
	"github.com/IBM/fp-go/v2/internal/functor"
	I "github.com/IBM/fp-go/v2/optics/iso"
)

// AsTraversal converts an iso to a traversal.
//
// Since an iso always has exactly one focus, the traversal is simply the
// composition of the iso's Get, the effectful function f, and ReverseGet
// lifted into the target context via fmap.
//
// Parameters:
//   - fmap: the functor map used to lift ReverseGet into the HKT context
func AsTraversal[R ~func(func(A) HKTA) func(S) HKTS, S, A, HKTS, HKTA any](
	fmap functor.MapType[A, S, HKTA, HKTS],
) func(I.Iso[S, A]) R {
	return func(sa I.Iso[S, A]) R {
		// Lift ReverseGet once, outside the returned closure, so the lifted
		// setter is shared by every invocation of the traversal.
		saSet := fmap(sa.ReverseGet)
		return func(f func(A) HKTA) func(S) HKTS {
			return F.Flow3(
				sa.Get,
				f,
				saSet,
			)
		}
	}
}
+1 -1
View File
@@ -23,5 +23,5 @@ import (
)
func AsTraversal[E, S, A any]() func(L.Lens[S, A]) T.Traversal[E, S, A] {
return LG.AsTraversal[T.Traversal[E, S, A]](ET.MonadMap[E, A, S])
return LG.AsTraversal[T.Traversal[E, S, A]](ET.Map[E, A, S])
}
+10 -5
View File
@@ -16,19 +16,24 @@
package generic
import (
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/functor"
L "github.com/IBM/fp-go/v2/optics/lens"
)
// AsTraversal converts a lens to a traversal
func AsTraversal[R ~func(func(A) HKTA) func(S) HKTS, S, A, HKTS, HKTA any](
fmap func(HKTA, func(A) S) HKTS,
fmap functor.MapType[A, S, HKTA, HKTS],
) func(L.Lens[S, A]) R {
return func(sa L.Lens[S, A]) R {
return func(f func(a A) HKTA) func(S) HKTS {
return func(f func(A) HKTA) func(S) HKTS {
return func(s S) HKTS {
return fmap(f(sa.Get(s)), func(a A) S {
return sa.Set(a)(s)
})
return F.Pipe1(
f(sa.Get(s)),
fmap(func(a A) S {
return sa.Set(a)(s)
}),
)
}
}
}
+1 -1
View File
@@ -60,5 +60,5 @@ import (
// configs := []Config{{Timeout: O.Some(30)}, {Timeout: O.None[int]()}}
// // Apply operations across all configs using the traversal
func AsTraversal[S, A any]() func(Lens[S, A]) T.Traversal[S, A] {
return LG.AsTraversal[T.Traversal[S, A]](O.MonadMap[A, S])
return LG.AsTraversal[T.Traversal[S, A]](O.Map[A, S])
}
@@ -0,0 +1,86 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package identity
import (
I "github.com/IBM/fp-go/v2/identity"
G "github.com/IBM/fp-go/v2/optics/lens/traversal/generic"
)
// Compose composes a lens with a traversal to create a new traversal.
//
// This function allows you to focus deeper into a data structure by first using
// a lens to access a field, then using a traversal to access multiple values within
// that field. The result is a traversal that can operate on all the nested values.
//
// The composition follows the pattern: Lens[S, A] → Traversal[A, B] → Traversal[S, B]
// where the lens focuses on field A within structure S, and the traversal focuses on
// multiple B values within A.
//
// Type Parameters:
// - S: The outer structure type
// - A: The intermediate field type (target of the lens)
// - B: The final focus type (targets of the traversal)
//
// Parameters:
// - t: A traversal that focuses on B values within A
//
// Returns:
// - A function that takes a Lens[S, A] and returns a Traversal[S, B]
//
// Example:
//
// import (
// F "github.com/IBM/fp-go/v2/function"
// "github.com/IBM/fp-go/v2/optics/lens"
// LT "github.com/IBM/fp-go/v2/optics/lens/traversal"
// AI "github.com/IBM/fp-go/v2/optics/traversal/array/identity"
// )
//
// type Team struct {
// Name string
// Members []string
// }
//
// // Lens to access the Members field
// membersLens := lens.MakeLens(
// func(t Team) []string { return t.Members },
// func(t Team, m []string) Team { t.Members = m; return t },
// )
//
// // Traversal for array elements
// arrayTraversal := AI.FromArray[string]()
//
// // Compose lens with traversal to access all member names
// memberTraversal := F.Pipe1(
// membersLens,
// LT.Compose[Team, []string, string](arrayTraversal),
// )
//
// team := Team{Name: "Engineering", Members: []string{"Alice", "Bob"}}
// // Uppercase all member names
// updated := memberTraversal(strings.ToUpper)(team)
// // updated.Members: ["ALICE", "BOB"]
//
// See Also:
// - Lens: A functional reference to a subpart of a data structure
// - Traversal: A functional reference to multiple subparts
// - traversal.Compose: Composes two traversals
func Compose[S, A, B any](t Traversal[A, B, A, B]) func(Lens[S, A]) Traversal[S, B, S, B] {
	// Delegate to the generic composition, instantiated with the Identity
	// functor's Map as the fmap used to turn the lens into a traversal.
	return G.Compose[S, A, B, S, A, B](
		I.Map,
	)(t)
}
@@ -0,0 +1,253 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package identity
import (
"strings"
"testing"
AR "github.com/IBM/fp-go/v2/array"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/optics/lens"
AI "github.com/IBM/fp-go/v2/optics/traversal/array/identity"
"github.com/stretchr/testify/assert"
)
// Team is a test fixture: a named group with a list of member names.
type Team struct {
	Name    string
	Members []string
}

// Company is a test fixture: a named company holding several Teams.
type Company struct {
	Name  string
	Teams []Team
}

// TestCompose_Success covers the happy path of composing a lens with an array
// traversal: modifying every element of a nested slice field.
func TestCompose_Success(t *testing.T) {
	t.Run("composes lens with array traversal to modify nested values", func(t *testing.T) {
		// Arrange
		membersLens := lens.MakeLens(
			func(team Team) []string { return team.Members },
			func(team Team, members []string) Team {
				team.Members = members
				return team
			},
		)
		arrayTraversal := AI.FromArray[string]()
		memberTraversal := F.Pipe1(
			membersLens,
			Compose[Team](arrayTraversal),
		)
		team := Team{
			Name:    "Engineering",
			Members: []string{"alice", "bob", "charlie"},
		}
		// Act - uppercase all member names
		result := memberTraversal(strings.ToUpper)(team)
		// Assert
		expected := Team{
			Name:    "Engineering",
			Members: []string{"ALICE", "BOB", "CHARLIE"},
		}
		assert.Equal(t, expected, result)
	})
	t.Run("composes lens with array traversal on empty array", func(t *testing.T) {
		// Arrange
		membersLens := lens.MakeLens(
			func(team Team) []string { return team.Members },
			func(team Team, members []string) Team {
				team.Members = members
				return team
			},
		)
		arrayTraversal := AI.FromArray[string]()
		memberTraversal := F.Pipe1(
			membersLens,
			Compose[Team](arrayTraversal),
		)
		team := Team{
			Name:    "Engineering",
			Members: []string{},
		}
		// Act
		result := memberTraversal(strings.ToUpper)(team)
		// Assert - traversing an empty slice is a no-op on the structure.
		assert.Equal(t, team, result)
	})
	t.Run("composes lens with array traversal to transform numbers", func(t *testing.T) {
		// Arrange - a numeric field shows the traversal is not string-specific.
		type Stats struct {
			Name   string
			Scores []int
		}
		scoresLens := lens.MakeLens(
			func(s Stats) []int { return s.Scores },
			func(s Stats, scores []int) Stats {
				s.Scores = scores
				return s
			},
		)
		arrayTraversal := AI.FromArray[int]()
		scoreTraversal := F.Pipe1(
			scoresLens,
			Compose[Stats, []int, int](arrayTraversal),
		)
		stats := Stats{
			Name:   "Player1",
			Scores: []int{10, 20, 30},
		}
		// Act - double all scores
		result := scoreTraversal(func(n int) int { return n * 2 })(stats)
		// Assert
		expected := Stats{
			Name:   "Player1",
			Scores: []int{20, 40, 60},
		}
		assert.Equal(t, expected, result)
	})
}

// TestCompose_Integration exercises a two-level composition:
// Company -> []Team via one lens+traversal, then Team -> []string via another.
func TestCompose_Integration(t *testing.T) {
	t.Run("composes multiple lenses and traversals", func(t *testing.T) {
		// Arrange - nested structure with Company -> Teams -> Members
		teamsLens := lens.MakeLens(
			func(c Company) []Team { return c.Teams },
			func(c Company, teams []Team) Company {
				c.Teams = teams
				return c
			},
		)
		// First compose: Company -> []Team -> Team
		teamArrayTraversal := AI.FromArray[Team]()
		companyToTeamTraversal := F.Pipe1(
			teamsLens,
			Compose[Company, []Team, Team](teamArrayTraversal),
		)
		// Second compose: Team -> []string -> string
		membersLens := lens.MakeLens(
			func(team Team) []string { return team.Members },
			func(team Team, members []string) Team {
				team.Members = members
				return team
			},
		)
		memberArrayTraversal := AI.FromArray[string]()
		teamToMemberTraversal := F.Pipe1(
			membersLens,
			Compose[Team](memberArrayTraversal),
		)
		company := Company{
			Name: "TechCorp",
			Teams: []Team{
				{Name: "Engineering", Members: []string{"alice", "bob"}},
				{Name: "Design", Members: []string{"charlie", "diana"}},
			},
		}
		// Act - uppercase all members in all teams
		// First traverse to teams, then for each team traverse to members
		result := companyToTeamTraversal(func(team Team) Team {
			return teamToMemberTraversal(strings.ToUpper)(team)
		})(company)
		// Assert
		expected := Company{
			Name: "TechCorp",
			Teams: []Team{
				{Name: "Engineering", Members: []string{"ALICE", "BOB"}},
				{Name: "Design", Members: []string{"CHARLIE", "DIANA"}},
			},
		}
		assert.Equal(t, expected, result)
	})
}

// TestCompose_EdgeCases checks that untouched fields survive a traversal and
// that traversing with the identity function leaves the structure unchanged.
func TestCompose_EdgeCases(t *testing.T) {
	t.Run("preserves structure name when modifying members", func(t *testing.T) {
		// Arrange
		membersLens := lens.MakeLens(
			func(team Team) []string { return team.Members },
			func(team Team, members []string) Team {
				team.Members = members
				return team
			},
		)
		arrayTraversal := AI.FromArray[string]()
		memberTraversal := F.Pipe1(
			membersLens,
			Compose[Team](arrayTraversal),
		)
		team := Team{
			Name:    "Engineering",
			Members: []string{"alice"},
		}
		// Act
		result := memberTraversal(strings.ToUpper)(team)
		// Assert - Name should be unchanged
		assert.Equal(t, "Engineering", result.Name)
		assert.Equal(t, AR.From("ALICE"), result.Members)
	})
	t.Run("handles identity transformation", func(t *testing.T) {
		// Arrange
		membersLens := lens.MakeLens(
			func(team Team) []string { return team.Members },
			func(team Team, members []string) Team {
				team.Members = members
				return team
			},
		)
		arrayTraversal := AI.FromArray[string]()
		memberTraversal := F.Pipe1(
			membersLens,
			Compose[Team](arrayTraversal),
		)
		team := Team{
			Name:    "Engineering",
			Members: []string{"alice", "bob"},
		}
		// Act - apply identity function
		result := memberTraversal(F.Identity[string])(team)
		// Assert - should be unchanged
		assert.Equal(t, team, result)
	})
}
@@ -0,0 +1,14 @@
package identity
import (
"github.com/IBM/fp-go/v2/optics/lens"
T "github.com/IBM/fp-go/v2/optics/traversal"
)
type (
	// Lens is a functional reference to a subpart of a data structure.
	Lens[S, A any] = lens.Lens[S, A]

	// Traversal is a functional reference to zero or more subparts of a data
	// structure, parameterized over the higher-kinded source (HKTS) and
	// focus (HKTA) types.
	Traversal[S, A, HKTS, HKTA any] = T.Traversal[S, A, HKTS, HKTA]
)
@@ -0,0 +1,25 @@
package generic
import (
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/functor"
G "github.com/IBM/fp-go/v2/optics/lens/generic"
TG "github.com/IBM/fp-go/v2/optics/traversal/generic"
)
// Compose composes a lens with a traversal to create a new traversal.
//
// The lens is first converted into a traversal via G.AsTraversal (using the
// supplied fmap), and the result is composed with the given traversal via
// TG.Compose, so the final optic focuses on every B reachable through the
// lens's A focus inside S.
//
// Parameters:
//   - fmap: the functor map used to lift the lens's setter into the HKT context
//
// Returns a function taking the inner Traversal[A, B] and then the
// Lens[S, A], yielding the composed Traversal[S, B].
func Compose[S, A, B, HKTS, HKTA, HKTB any](
	fmap functor.MapType[A, S, HKTA, HKTS],
) func(Traversal[A, B, HKTA, HKTB]) func(Lens[S, A]) Traversal[S, B, HKTS, HKTB] {
	// Convert the lens to a Traversal[S, A] once; the conversion is shared by
	// every traversal subsequently composed with it.
	lensTrav := G.AsTraversal[Traversal[S, A, HKTS, HKTA]](fmap)
	return func(ab Traversal[A, B, HKTA, HKTB]) func(Lens[S, A]) Traversal[S, B, HKTS, HKTB] {
		return F.Flow2(
			lensTrav,
			TG.Compose[
				Traversal[A, B, HKTA, HKTB],
				Traversal[S, A, HKTS, HKTA],
				Traversal[S, B, HKTS, HKTB],
			](ab),
		)
	}
}
+14
View File
@@ -0,0 +1,14 @@
package generic
import (
"github.com/IBM/fp-go/v2/optics/lens"
T "github.com/IBM/fp-go/v2/optics/traversal"
)
type (
	// Lens is a functional reference to a subpart of a data structure.
	Lens[S, A any] = lens.Lens[S, A]

	// Traversal is a functional reference to zero or more subparts of a data
	// structure, parameterized over the higher-kinded source (HKTS) and
	// focus (HKTA) types.
	Traversal[S, A, HKTS, HKTA any] = T.Traversal[S, A, HKTS, HKTA]
)
+79
View File
@@ -0,0 +1,79 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package array
import (
OP "github.com/IBM/fp-go/v2/optics/optional"
G "github.com/IBM/fp-go/v2/optics/optional/array/generic"
)
// At creates an Optional that focuses on the element at a specific index in an array.
//
// This function returns an Optional that can get and set the element at the given index.
// If the index is out of bounds, GetOption returns None and Set operations are no-ops
// (the array is returned unchanged). This follows the Optional laws where operations
// on non-existent values have no effect.
//
// The Optional provides safe array access without panicking on invalid indices, making
// it ideal for functional transformations where you want to modify array elements only
// when they exist.
//
// Type Parameters:
// - A: The type of elements in the array
//
// Parameters:
// - idx: The zero-based index to focus on
//
// Returns:
// - An Optional that focuses on the element at the specified index
//
// Example:
//
// import (
// AR "github.com/IBM/fp-go/v2/array"
// OP "github.com/IBM/fp-go/v2/optics/optional"
// OA "github.com/IBM/fp-go/v2/optics/optional/array"
// )
//
// numbers := []int{10, 20, 30, 40}
//
// // Create an optional focusing on index 1
// second := OA.At[int](1)
//
// // Get the element at index 1
// value := second.GetOption(numbers)
// // value: option.Some(20)
//
// // Set the element at index 1
// updated := second.Set(25)(numbers)
// // updated: []int{10, 25, 30, 40}
//
// // Out of bounds access returns None
// outOfBounds := OA.At[int](10)
// value = outOfBounds.GetOption(numbers)
// // value: option.None[int]()
//
// // Out of bounds set is a no-op
// unchanged := outOfBounds.Set(99)(numbers)
// // unchanged: []int{10, 20, 30, 40} (original array)
//
// See Also:
// - AR.Lookup: Gets an element at an index, returning an Option
// - AR.UpdateAt: Updates an element at an index, returning an Option
// - OP.Optional: The Optional optic type
func At[A any](idx int) OP.Optional[[]A, A] {
	// Delegate to the generic implementation, specialized to plain []A slices.
	return G.At[[]A](idx)
}
+466
View File
@@ -0,0 +1,466 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package array
import (
"testing"
EQ "github.com/IBM/fp-go/v2/eq"
F "github.com/IBM/fp-go/v2/function"
O "github.com/IBM/fp-go/v2/option"
"github.com/stretchr/testify/assert"
)
// TestAt_GetOption tests the GetOption functionality: Some for any in-range
// index, None for negative, out-of-bounds, empty, and nil slices.
func TestAt_GetOption(t *testing.T) {
	t.Run("returns Some for valid index", func(t *testing.T) {
		numbers := []int{10, 20, 30, 40}
		optional := At[int](1)
		result := optional.GetOption(numbers)
		assert.Equal(t, O.Some(20), result)
	})
	t.Run("returns Some for first element", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](0)
		result := optional.GetOption(numbers)
		assert.Equal(t, O.Some(10), result)
	})
	t.Run("returns Some for last element", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](2)
		result := optional.GetOption(numbers)
		assert.Equal(t, O.Some(30), result)
	})
	t.Run("returns None for negative index", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](-1)
		result := optional.GetOption(numbers)
		assert.Equal(t, O.None[int](), result)
	})
	t.Run("returns None for out of bounds index", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](10)
		result := optional.GetOption(numbers)
		assert.Equal(t, O.None[int](), result)
	})
	t.Run("returns None for empty array", func(t *testing.T) {
		numbers := []int{}
		optional := At[int](0)
		result := optional.GetOption(numbers)
		assert.Equal(t, O.None[int](), result)
	})
	t.Run("returns None for nil array", func(t *testing.T) {
		var numbers []int
		optional := At[int](0)
		result := optional.GetOption(numbers)
		assert.Equal(t, O.None[int](), result)
	})
}

// TestAt_Set tests the Set functionality: in-range sets produce an updated
// copy (original untouched); out-of-range sets are no-ops.
func TestAt_Set(t *testing.T) {
	t.Run("updates element at valid index", func(t *testing.T) {
		numbers := []int{10, 20, 30, 40}
		optional := At[int](1)
		result := optional.Set(25)(numbers)
		assert.Equal(t, []int{10, 25, 30, 40}, result)
		assert.Equal(t, []int{10, 20, 30, 40}, numbers) // Original unchanged
	})
	t.Run("updates first element", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](0)
		result := optional.Set(5)(numbers)
		assert.Equal(t, []int{5, 20, 30}, result)
	})
	t.Run("updates last element", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](2)
		result := optional.Set(35)(numbers)
		assert.Equal(t, []int{10, 20, 35}, result)
	})
	t.Run("is no-op for negative index", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](-1)
		result := optional.Set(99)(numbers)
		assert.Equal(t, numbers, result)
	})
	t.Run("is no-op for out of bounds index", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](10)
		result := optional.Set(99)(numbers)
		assert.Equal(t, numbers, result)
	})
	t.Run("is no-op for empty array", func(t *testing.T) {
		numbers := []int{}
		optional := At[int](0)
		result := optional.Set(99)(numbers)
		assert.Equal(t, numbers, result)
	})
	t.Run("is no-op for nil array", func(t *testing.T) {
		var numbers []int
		optional := At[int](0)
		result := optional.Set(99)(numbers)
		assert.Equal(t, numbers, result)
	})
}

// TestAt_OptionalLaw1_GetSetNoOp tests Optional Law 1: GetSet Law (No-op on None)
// If GetOption(s) returns None, then Set(a)(s) must return s unchanged (no-op).
func TestAt_OptionalLaw1_GetSetNoOp(t *testing.T) {
	t.Run("out of bounds index - set is no-op", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](10)
		// Verify GetOption returns None
		assert.Equal(t, O.None[int](), optional.GetOption(numbers))
		// Set should be a no-op
		result := optional.Set(99)(numbers)
		assert.Equal(t, numbers, result)
	})
	t.Run("negative index - set is no-op", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](-1)
		// Verify GetOption returns None
		assert.Equal(t, O.None[int](), optional.GetOption(numbers))
		// Set should be a no-op
		result := optional.Set(99)(numbers)
		assert.Equal(t, numbers, result)
	})
	t.Run("empty array - set is no-op", func(t *testing.T) {
		numbers := []int{}
		optional := At[int](0)
		// Verify GetOption returns None
		assert.Equal(t, O.None[int](), optional.GetOption(numbers))
		// Set should be a no-op
		result := optional.Set(99)(numbers)
		assert.Equal(t, numbers, result)
	})
	t.Run("nil array - set is no-op", func(t *testing.T) {
		var numbers []int
		optional := At[int](0)
		// Verify GetOption returns None
		assert.Equal(t, O.None[int](), optional.GetOption(numbers))
		// Set should be a no-op
		result := optional.Set(99)(numbers)
		assert.Equal(t, numbers, result)
	})
}

// TestAt_OptionalLaw2_SetGet tests Optional Law 2: SetGet Law (Get what you Set)
// If GetOption(s) returns Some(_), then GetOption(Set(a)(s)) must return Some(a).
func TestAt_OptionalLaw2_SetGet(t *testing.T) {
	t.Run("set then get returns the set value", func(t *testing.T) {
		numbers := []int{10, 20, 30, 40}
		optional := At[int](1)
		// Verify GetOption returns Some (precondition)
		assert.True(t, O.IsSome(optional.GetOption(numbers)))
		// Set a new value
		newValue := 25
		updated := optional.Set(newValue)(numbers)
		// GetOption on updated should return Some(newValue)
		result := optional.GetOption(updated)
		assert.Equal(t, O.Some(newValue), result)
	})
	t.Run("set first element then get", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](0)
		assert.True(t, O.IsSome(optional.GetOption(numbers)))
		newValue := 5
		updated := optional.Set(newValue)(numbers)
		result := optional.GetOption(updated)
		assert.Equal(t, O.Some(newValue), result)
	})
	t.Run("set last element then get", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](2)
		assert.True(t, O.IsSome(optional.GetOption(numbers)))
		newValue := 35
		updated := optional.Set(newValue)(numbers)
		result := optional.GetOption(updated)
		assert.Equal(t, O.Some(newValue), result)
	})
	t.Run("multiple indices satisfy law", func(t *testing.T) {
		numbers := []int{10, 20, 30, 40, 50}
		// Check the law at every valid index of the slice.
		for i := range 5 {
			optional := At[int](i)
			assert.True(t, O.IsSome(optional.GetOption(numbers)))
			newValue := i * 100
			updated := optional.Set(newValue)(numbers)
			result := optional.GetOption(updated)
			assert.Equal(t, O.Some(newValue), result)
		}
	})
}

// TestAt_OptionalLaw3_SetSet tests Optional Law 3: SetSet Law (Last Set Wins)
// Setting twice is the same as setting once with the final value.
// Formally: Set(b)(Set(a)(s)) = Set(b)(s)
func TestAt_OptionalLaw3_SetSet(t *testing.T) {
	// Element-wise slice equality used to compare outcomes of the law.
	eqSlice := EQ.FromEquals(func(a, b []int) bool {
		if len(a) != len(b) {
			return false
		}
		for i := range len(a) {
			if a[i] != b[i] {
				return false
			}
		}
		return true
	})
	t.Run("setting twice equals setting once with final value", func(t *testing.T) {
		numbers := []int{10, 20, 30, 40}
		optional := At[int](1)
		// Set twice: first to 25, then to 99
		setTwice := F.Pipe2(
			numbers,
			optional.Set(25),
			optional.Set(99),
		)
		// Set once with final value
		setOnce := optional.Set(99)(numbers)
		assert.True(t, eqSlice.Equals(setTwice, setOnce))
	})
	t.Run("multiple sets - last one wins", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](0)
		// Set multiple times
		result := F.Pipe4(
			numbers,
			optional.Set(1),
			optional.Set(2),
			optional.Set(3),
			optional.Set(4),
		)
		// Should equal setting once with final value
		expected := optional.Set(4)(numbers)
		assert.True(t, eqSlice.Equals(result, expected))
	})
	t.Run("set twice on out of bounds - both no-ops", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](10)
		// Set twice on out of bounds
		setTwice := F.Pipe2(
			numbers,
			optional.Set(25),
			optional.Set(99),
		)
		// Set once on out of bounds
		setOnce := optional.Set(99)(numbers)
		// Both should be no-ops, returning original
		assert.True(t, eqSlice.Equals(setTwice, numbers))
		assert.True(t, eqSlice.Equals(setOnce, numbers))
		assert.True(t, eqSlice.Equals(setTwice, setOnce))
	})
}

// TestAt_EdgeCases tests edge cases and boundary conditions: single-element
// and large slices, several element types, and capacity handling.
func TestAt_EdgeCases(t *testing.T) {
	t.Run("single element array", func(t *testing.T) {
		numbers := []int{42}
		optional := At[int](0)
		// Get
		assert.Equal(t, O.Some(42), optional.GetOption(numbers))
		// Set
		updated := optional.Set(99)(numbers)
		assert.Equal(t, []int{99}, updated)
		// Out of bounds
		outOfBounds := At[int](1)
		assert.Equal(t, O.None[int](), outOfBounds.GetOption(numbers))
		assert.Equal(t, numbers, outOfBounds.Set(99)(numbers))
	})
	t.Run("large array", func(t *testing.T) {
		numbers := make([]int, 1000)
		for i := range 1000 {
			numbers[i] = i
		}
		optional := At[int](500)
		// Get
		assert.Equal(t, O.Some(500), optional.GetOption(numbers))
		// Set
		updated := optional.Set(9999)(numbers)
		assert.Equal(t, 9999, updated[500])
		assert.Equal(t, 500, numbers[500]) // Original unchanged
	})
	t.Run("works with different types", func(t *testing.T) {
		// String array
		strings := []string{"a", "b", "c"}
		strOptional := At[string](1)
		assert.Equal(t, O.Some("b"), strOptional.GetOption(strings))
		assert.Equal(t, []string{"a", "x", "c"}, strOptional.Set("x")(strings))
		// Bool array
		bools := []bool{true, false, true}
		boolOptional := At[bool](1)
		assert.Equal(t, O.Some(false), boolOptional.GetOption(bools))
		assert.Equal(t, []bool{true, true, true}, boolOptional.Set(true)(bools))
	})
	t.Run("preserves array capacity", func(t *testing.T) {
		// Slice with spare capacity: the update must keep length at 3.
		numbers := make([]int, 3, 10)
		numbers[0], numbers[1], numbers[2] = 10, 20, 30
		optional := At[int](1)
		updated := optional.Set(25)(numbers)
		assert.Equal(t, []int{10, 25, 30}, updated)
		assert.Equal(t, 3, len(updated))
	})
}

// TestAt_Integration tests integration scenarios: combining several optionals,
// chaining get/set, and conditionally updating based on the current value.
func TestAt_Integration(t *testing.T) {
	t.Run("multiple optionals on same array", func(t *testing.T) {
		numbers := []int{10, 20, 30, 40}
		first := At[int](0)
		second := At[int](1)
		third := At[int](2)
		// Update multiple indices
		result := F.Pipe3(
			numbers,
			first.Set(1),
			second.Set(2),
			third.Set(3),
		)
		assert.Equal(t, []int{1, 2, 3, 40}, result)
		assert.Equal(t, []int{10, 20, 30, 40}, numbers) // Original unchanged
	})
	t.Run("chaining operations", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](1)
		// Get, verify, set, get again
		original := optional.GetOption(numbers)
		assert.Equal(t, O.Some(20), original)
		updated := optional.Set(25)(numbers)
		newValue := optional.GetOption(updated)
		assert.Equal(t, O.Some(25), newValue)
		// Original still unchanged
		assert.Equal(t, O.Some(20), optional.GetOption(numbers))
	})
	t.Run("conditional update based on current value", func(t *testing.T) {
		numbers := []int{10, 20, 30}
		optional := At[int](1)
		// Get current value and conditionally update
		result := F.Pipe1(
			optional.GetOption(numbers),
			O.Fold(
				func() []int { return numbers },
				func(current int) []int {
					if current > 15 {
						return optional.Set(current * 2)(numbers)
					}
					return numbers
				},
			),
		)
		assert.Equal(t, []int{10, 40, 30}, result)
	})
}
+98
View File
@@ -0,0 +1,98 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generic
import (
"fmt"
AR "github.com/IBM/fp-go/v2/array/generic"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/lazy"
OP "github.com/IBM/fp-go/v2/optics/optional"
O "github.com/IBM/fp-go/v2/option"
)
// At creates an Optional that focuses on the element at a specific index in an array.
//
// This function returns an Optional that can get and set the element at the given index.
// If the index is out of bounds, GetOption returns None and Set operations are no-ops
// (the array is returned unchanged). This follows the Optional laws where operations
// on non-existent values have no effect.
//
// The Optional provides safe array access without panicking on invalid indices, making
// it ideal for functional transformations where you want to modify array elements only
// when they exist.
//
// Type Parameters:
// - A: The type of elements in the array
//
// Parameters:
// - idx: The zero-based index to focus on
//
// Returns:
// - An Optional that focuses on the element at the specified index
//
// Example:
//
// import (
// AR "github.com/IBM/fp-go/v2/array"
// OP "github.com/IBM/fp-go/v2/optics/optional"
// OA "github.com/IBM/fp-go/v2/optics/optional/array"
// )
//
// numbers := []int{10, 20, 30, 40}
//
// // Create an optional focusing on index 1
// second := OA.At[int](1)
//
// // Get the element at index 1
// value := second.GetOption(numbers)
// // value: option.Some(20)
//
// // Set the element at index 1
// updated := second.Set(25)(numbers)
// // updated: []int{10, 25, 30, 40}
//
// // Out of bounds access returns None
// outOfBounds := OA.At[int](10)
// value = outOfBounds.GetOption(numbers)
// // value: option.None[int]()
//
// // Out of bounds set is a no-op
// unchanged := outOfBounds.Set(99)(numbers)
// // unchanged: []int{10, 20, 30, 40} (original array)
//
// See Also:
// - AR.Lookup: Gets an element at an index, returning an Option
// - AR.UpdateAt: Updates an element at an index, returning an Option
// - OP.Optional: The Optional optic type
func At[GA ~[]A, A any](idx int) OP.Optional[GA, A] {
	// Lookup is the safe getter: it yields None for a negative or
	// out-of-range index.
	lookup := AR.Lookup[GA](idx)
	return OP.MakeOptionalCurriedWithName(
		lookup,
		func(a A) func(GA) GA {
			// UpdateAt returns None when idx is out of bounds; in that case
			// GetOrElse falls back to the original slice, making Set a no-op
			// as required by the Optional laws.
			update := AR.UpdateAt[GA](idx, a)
			return func(as GA) GA {
				return F.Pipe2(
					as,
					update,
					O.GetOrElse(lazy.Of(as)),
				)
			}
		},
		// Name the optic after its index for diagnostics.
		fmt.Sprintf("At[%d]", idx),
	)
}
+34
View File
@@ -0,0 +1,34 @@
package optional

import (
	F "github.com/IBM/fp-go/v2/function"
	"github.com/IBM/fp-go/v2/internal/functor"
	"github.com/IBM/fp-go/v2/internal/pointed"
	"github.com/IBM/fp-go/v2/lazy"
	O "github.com/IBM/fp-go/v2/option"
)

// AsTraversal converts an optional to a traversal.
//
// When the optional's GetOption yields None for a source s, the source is
// returned unchanged, lifted into the HKT context via fof. When it yields
// Some(a), the effectful function f is applied to a and the result is mapped
// back into the source via the optional's Set.
//
// Parameters:
//   - fof:  lifts an unmodified source into the HKT context (pointed Of)
//   - fmap: the functor map used to rebuild the source from the new focus
//
// NOTE(review): fof(s) is evaluated eagerly before the Fold, even when the
// focus exists; if Of were ever costly or effectful this should be deferred —
// confirm against the pointed.OfType contract.
func AsTraversal[R ~func(func(A) HKTA) func(S) HKTS, S, A, HKTS, HKTA any](
	fof pointed.OfType[S, HKTS],
	fmap functor.MapType[A, S, HKTA, HKTS],
) func(Optional[S, A]) R {
	return func(sa Optional[S, A]) R {
		return func(f func(A) HKTA) func(S) HKTS {
			return func(s S) HKTS {
				return F.Pipe2(
					s,
					sa.GetOption,
					O.Fold(
						// No focus: return s unchanged in the HKT context.
						lazy.Of(fof(s)),
						// Focus present: transform it, then set it back into s.
						F.Flow2(
							f,
							fmap(func(a A) S {
								return sa.Set(a)(s)
							}),
						),
					),
				)
			}
		}
	}
}
+4 -2
View File
@@ -310,8 +310,10 @@ func TestAsTraversal(t *testing.T) {
return Identity[Option[int]]{Value: s}
}
fmap := func(ia Identity[int], f func(int) Option[int]) Identity[Option[int]] {
return Identity[Option[int]]{Value: f(ia.Value)}
fmap := func(f func(int) Option[int]) func(Identity[int]) Identity[Option[int]] {
return func(ia Identity[int]) Identity[Option[int]] {
return Identity[Option[int]]{Value: f(ia.Value)}
}
}
type TraversalFunc func(func(int) Identity[int]) func(Option[int]) Identity[Option[int]]
+13 -11
View File
@@ -17,6 +17,9 @@ package prism
import (
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/functor"
"github.com/IBM/fp-go/v2/internal/pointed"
"github.com/IBM/fp-go/v2/lazy"
O "github.com/IBM/fp-go/v2/option"
)
@@ -58,24 +61,23 @@ import (
// higher-kinded types and applicative functors. Most users will work
// directly with prisms rather than converting them to traversals.
func AsTraversal[R ~func(func(A) HKTA) func(S) HKTS, S, A, HKTS, HKTA any](
fof func(S) HKTS,
fmap func(HKTA, func(A) S) HKTS,
fof pointed.OfType[S, HKTS],
fmap functor.MapType[A, S, HKTA, HKTS],
) func(Prism[S, A]) R {
return func(sa Prism[S, A]) R {
return func(f func(a A) HKTA) func(S) HKTS {
return func(f func(A) HKTA) func(S) HKTS {
return func(s S) HKTS {
return F.Pipe2(
s,
sa.GetOption,
O.Fold(
// If prism doesn't match, return the original value lifted into HKTS
F.Nullary2(F.Constant(s), fof),
// If prism matches, apply f to the extracted value and map back
func(a A) HKTS {
return fmap(f(a), func(a A) S {
return prismModify(F.Constant1[A](a), sa, s)
})
},
lazy.Of(fof(s)),
F.Flow2(
f,
fmap(func(a A) S {
return Set[S](a)(sa)(s)
}),
),
),
)
}
+1 -1
View File
@@ -23,6 +23,6 @@ import (
)
// FromArray returns a traversal from an array for the identity [Monoid]
func FromArray[E, A any](m M.Monoid[E]) G.Traversal[[]A, A, C.Const[E, []A], C.Const[E, A]] {
func FromArray[A, E any](m M.Monoid[E]) G.Traversal[[]A, A, C.Const[E, []A], C.Const[E, A]] {
return AR.FromArray[[]A](m)
}
@@ -21,7 +21,51 @@ import (
G "github.com/IBM/fp-go/v2/optics/traversal/generic"
)
// FromArray returns a traversal from an array for the identity monad
// FromArray creates a traversal for array elements using the Identity functor.
//
// This is a specialized version of the generic FromArray that uses the Identity
// functor, which provides the simplest possible computational context (no context).
// This makes it ideal for straightforward array transformations where you want to
// modify elements directly without additional effects.
//
// The Identity functor means that operations are applied directly to values without
// wrapping them in any additional structure. This results in clean, efficient
// traversals that simply map functions over array elements.
//
// Type Parameters:
// - GA: Array type constraint (e.g., []A)
// - A: The element type within the array
//
// Returns:
// - A Traversal that can transform all elements in an array
//
// Example:
//
// import (
// F "github.com/IBM/fp-go/v2/function"
// T "github.com/IBM/fp-go/v2/optics/traversal"
// TI "github.com/IBM/fp-go/v2/optics/traversal/array/generic/identity"
// )
//
// // Create a traversal for integer arrays
// arrayTraversal := TI.FromArray[[]int, int]()
//
// // Compose with identity traversal
// traversal := F.Pipe1(
// T.Id[[]int, []int](),
// T.Compose[[]int, []int, []int, int](arrayTraversal),
// )
//
// // Double all numbers in the array
// numbers := []int{1, 2, 3, 4, 5}
// doubled := traversal(func(n int) int { return n * 2 })(numbers)
// // doubled: []int{2, 4, 6, 8, 10}
//
// See Also:
// - AR.FromArray: Generic version with configurable functor
// - I.Of: Identity functor's pure/of operation
// - I.Map: Identity functor's map operation
// - I.Ap: Identity functor's applicative operation
func FromArray[GA ~[]A, A any]() G.Traversal[GA, A, GA, A] {
return AR.FromArray[GA](
I.Of[GA],
@@ -29,3 +73,75 @@ func FromArray[GA ~[]A, A any]() G.Traversal[GA, A, GA, A] {
I.Ap[GA, A],
)
}
// At creates a function that focuses a traversal on a specific array index using the Identity functor.
//
// This is a specialized version of the generic At that uses the Identity functor,
// providing the simplest computational context for array element access. It transforms
// a traversal focusing on an array into a traversal focusing on the element at the
// specified index.
//
// The Identity functor means operations are applied directly without additional wrapping,
// making this ideal for straightforward element modifications. If the index is out of
// bounds, the traversal focuses on zero elements (no-op).
//
// Type Parameters:
// - GA: Array type constraint (e.g., []A)
// - S: The source type of the outer traversal
// - A: The element type within the array
//
// Parameters:
// - idx: The zero-based index to focus on
//
// Returns:
// - A function that transforms a traversal on arrays into a traversal on a specific element
//
// Example:
//
// import (
// F "github.com/IBM/fp-go/v2/function"
// T "github.com/IBM/fp-go/v2/optics/traversal"
// TI "github.com/IBM/fp-go/v2/optics/traversal/array/generic/identity"
// )
//
// type Person struct {
// Name string
// Hobbies []string
// }
//
// // Create a traversal focusing on hobbies
// hobbiesTraversal := T.Id[Person, []string]()
//
// // Focus on the second hobby (index 1)
// secondHobby := F.Pipe1(
// hobbiesTraversal,
// TI.At[[]string, Person, string](1),
// )
//
// // Modify the second hobby
// person := Person{Name: "Alice", Hobbies: []string{"reading", "coding", "gaming"}}
// updated := secondHobby(func(s string) string {
// return s + "!"
// })(person)
// // updated.Hobbies: []string{"reading", "coding!", "gaming"}
//
// // Out of bounds index is a no-op
// outOfBounds := F.Pipe1(
// hobbiesTraversal,
// TI.At[[]string, Person, string](10),
// )
// unchanged := outOfBounds(func(s string) string {
// return s + "!"
// })(person)
// // unchanged.Hobbies: []string{"reading", "coding", "gaming"} (no change)
//
// See Also:
// - AR.At: Generic version with configurable functor
// - I.Of: Identity functor's pure/of operation
// - I.Map: Identity functor's map operation
func At[GA ~[]A, S, A any](idx int) func(G.Traversal[S, GA, S, GA]) G.Traversal[S, A, S, A] {
	// Instantiate the generic combinator with the Identity functor's
	// operations once, then fix the requested index.
	mkAt := AR.At[GA, S, A, S](I.Of[GA], I.Map[A, GA])
	return mkAt(idx)
}
+93 -7
View File
@@ -16,19 +16,105 @@
package generic
import (
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/apply"
AR "github.com/IBM/fp-go/v2/internal/array"
"github.com/IBM/fp-go/v2/internal/functor"
"github.com/IBM/fp-go/v2/internal/pointed"
"github.com/IBM/fp-go/v2/optics/optional"
OA "github.com/IBM/fp-go/v2/optics/optional/array/generic"
G "github.com/IBM/fp-go/v2/optics/traversal/generic"
)
// FromArray returns a traversal from an array
func FromArray[GA ~[]A, GB ~[]B, A, B, HKTB, HKTAB, HKTRB any](
fof func(GB) HKTRB,
fmap func(func(GB) func(B) GB) func(HKTRB) HKTAB,
fap func(HKTB) func(HKTAB) HKTRB,
fof pointed.OfType[GB, HKTRB],
fmap functor.MapType[GB, func(B) GB, HKTRB, HKTAB],
fap apply.ApType[HKTB, HKTRB, HKTAB],
) G.Traversal[GA, A, HKTRB, HKTB] {
return func(f func(A) HKTB) func(s GA) HKTRB {
return func(s GA) HKTRB {
return AR.MonadTraverse(fof, fmap, fap, s, f)
}
return func(f func(A) HKTB) func(GA) HKTRB {
return AR.Traverse[GA](fof, fmap, fap, f)
}
}
// At creates a function that focuses a traversal on a specific array index.
//
// This function takes an index and returns a function that transforms a traversal
// focusing on an array into a traversal focusing on the element at that index.
// It works by:
// 1. Creating an Optional that focuses on the array element at the given index
// 2. Converting that Optional into a Traversal
// 3. Composing it with the original traversal
//
// If the index is out of bounds, the traversal will focus on zero elements (no-op),
// following the Optional laws where operations on non-existent values have no effect.
//
// This is particularly useful when you have a nested structure containing arrays
// and want to traverse to a specific element within those arrays.
//
// Type Parameters:
// - GA: Array type constraint (e.g., []A)
// - S: The source type of the outer traversal
// - A: The element type within the array
// - HKTS: Higher-kinded type for S (functor/applicative context)
// - HKTGA: Higher-kinded type for GA (functor/applicative context)
// - HKTA: Higher-kinded type for A (functor/applicative context)
//
// Parameters:
// - fof: Function to lift GA into the higher-kinded type HKTGA (pure/of operation)
// - fmap: Function to map over HKTA and produce HKTGA (functor map operation)
//
// Returns:
// - A function that takes an index and returns a traversal transformer
//
// Example:
//
// import (
// F "github.com/IBM/fp-go/v2/function"
// "github.com/IBM/fp-go/v2/identity"
// T "github.com/IBM/fp-go/v2/optics/traversal"
// TA "github.com/IBM/fp-go/v2/optics/traversal/array/generic"
// )
//
// type Person struct {
// Name string
// Hobbies []string
// }
//
// // Create a traversal focusing on the hobbies array
// hobbiesTraversal := T.Id[Person, []string]()
//
// // Focus on the first hobby (index 0)
// firstHobby := F.Pipe1(
// hobbiesTraversal,
// TA.At[[]string, Person, string](
// identity.Of[[]string],
// identity.Map[string, []string],
// )(0),
// )
//
// // Modify the first hobby
// person := Person{Name: "Alice", Hobbies: []string{"reading", "coding"}}
// updated := firstHobby(func(s string) string {
// return s + "!"
// })(person)
// // updated.Hobbies: []string{"reading!", "coding"}
//
// See Also:
// - OA.At: Creates an Optional focusing on an array element
// - optional.AsTraversal: Converts an Optional to a Traversal
// - G.Compose: Composes two traversals
func At[GA ~[]A, S, A, HKTS, HKTGA, HKTA any](
	fof pointed.OfType[GA, HKTGA],
	fmap functor.MapType[A, GA, HKTA, HKTGA],
) func(int) func(G.Traversal[S, GA, HKTS, HKTGA]) G.Traversal[S, A, HKTS, HKTA] {
	// Fix the Optional->Traversal conversion and the composition step up
	// front; the returned function then only varies in the index optic.
	toTraversal := optional.AsTraversal[G.Traversal[GA, A, HKTGA, HKTA]](fof, fmap)
	compose := G.Compose[
		G.Traversal[GA, A, HKTGA, HKTA],
		G.Traversal[S, GA, HKTS, HKTGA],
		G.Traversal[S, A, HKTS, HKTA],
	]
	// index -> element Optional -> element Traversal -> composed Traversal
	return F.Flow3(OA.At[GA], toTraversal, compose)
}
+79 -3
View File
@@ -18,7 +18,12 @@ package generic
import (
AR "github.com/IBM/fp-go/v2/array/generic"
C "github.com/IBM/fp-go/v2/constant"
"github.com/IBM/fp-go/v2/endomorphism"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/functor"
"github.com/IBM/fp-go/v2/internal/pointed"
"github.com/IBM/fp-go/v2/optics/prism"
"github.com/IBM/fp-go/v2/predicate"
)
type (
@@ -47,7 +52,7 @@ func FromTraversable[
}
// FoldMap maps each target to a `Monoid` and combines the result
func FoldMap[M, S, A any](f func(A) M) func(sa Traversal[S, A, C.Const[M, S], C.Const[M, A]]) func(S) M {
func FoldMap[S, M, A any](f func(A) M) func(sa Traversal[S, A, C.Const[M, S], C.Const[M, A]]) func(S) M {
return func(sa Traversal[S, A, C.Const[M, S], C.Const[M, A]]) func(S) M {
return F.Flow2(
F.Pipe1(
@@ -61,13 +66,84 @@ func FoldMap[M, S, A any](f func(A) M) func(sa Traversal[S, A, C.Const[M, S], C.
// Fold maps each target to a `Monoid` and combines the result
func Fold[S, A any](sa Traversal[S, A, C.Const[A, S], C.Const[A, A]]) func(S) A {
return FoldMap[A, S](F.Identity[A])(sa)
return FoldMap[S](F.Identity[A])(sa)
}
// GetAll gets all the targets of a traversal
func GetAll[GA ~[]A, S, A any](s S) func(sa Traversal[S, A, C.Const[GA, S], C.Const[GA, A]]) GA {
fmap := FoldMap[GA, S](AR.Of[GA, A])
fmap := FoldMap[S](AR.Of[GA, A])
return func(sa Traversal[S, A, C.Const[GA, S], C.Const[GA, A]]) GA {
return fmap(sa)(s)
}
}
// Filter creates a function that filters the targets of a traversal based on a predicate.
//
// This function allows you to refine a traversal to only focus on values that satisfy
// a given predicate. It works by converting the predicate into a prism, then converting
// that prism into a traversal, and finally composing it with the original traversal.
//
// The filtering is selective: when modifying values through the filtered traversal,
// only values that satisfy the predicate will be transformed. Values that don't
// satisfy the predicate remain unchanged.
//
// Type Parameters:
// - S: The source type
// - A: The focus type (the values being filtered)
// - HKTS: Higher-kinded type for S (functor/applicative context)
// - HKTA: Higher-kinded type for A (functor/applicative context)
//
// Parameters:
// - fof: Function to lift A into the higher-kinded type HKTA (pure/of operation)
// - fmap: Function to map over HKTA (functor map operation)
//
// Returns:
// - A function that takes a predicate and returns an endomorphism on traversals
//
// Example:
//
// import (
// AR "github.com/IBM/fp-go/v2/array"
// F "github.com/IBM/fp-go/v2/function"
// "github.com/IBM/fp-go/v2/identity"
// N "github.com/IBM/fp-go/v2/number"
// AI "github.com/IBM/fp-go/v2/optics/traversal/array/identity"
// )
//
// // Create a traversal for array elements
// arrayTraversal := AI.FromArray[int]()
// baseTraversal := F.Pipe1(
// Id[[]int, []int](),
// Compose[[]int, []int, []int, int](arrayTraversal),
// )
//
// // Filter to only positive numbers
// isPositive := N.MoreThan(0)
// filteredTraversal := F.Pipe1(
// baseTraversal,
// Filter[[]int, int](identity.Of[int], identity.Map[int, int])(isPositive),
// )
//
// // Double only positive numbers
// numbers := []int{-2, -1, 0, 1, 2, 3}
// result := filteredTraversal(func(n int) int { return n * 2 })(numbers)
// // result: [-2, -1, 0, 2, 4, 6]
//
// See Also:
// - prism.FromPredicate: Creates a prism from a predicate
// - prism.AsTraversal: Converts a prism to a traversal
// - Compose: Composes two traversals
func Filter[S, HKTS, A, HKTA any](
	fof pointed.OfType[A, HKTA],
	fmap functor.MapType[A, A, HKTA, HKTA],
) func(predicate.Predicate[A]) endomorphism.Endomorphism[Traversal[S, A, HKTS, HKTA]] {
	// Fix the Prism->Traversal conversion once for all predicates.
	toTraversal := prism.AsTraversal[Traversal[A, A, HKTA, HKTA]](fof, fmap)
	return func(p predicate.Predicate[A]) endomorphism.Endomorphism[Traversal[S, A, HKTS, HKTA]] {
		// Turn the predicate into a self-traversal over the matching
		// values and compose it under the outer traversal.
		matching := toTraversal(prism.FromPredicate(p))
		return Compose[
			Traversal[A, A, HKTA, HKTA],
			Traversal[S, A, HKTS, HKTA],
			Traversal[S, A, HKTS, HKTA]](matching)
	}
}
+78 -14
View File
@@ -18,46 +18,110 @@ package traversal
import (
C "github.com/IBM/fp-go/v2/constant"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/identity"
"github.com/IBM/fp-go/v2/internal/functor"
"github.com/IBM/fp-go/v2/internal/pointed"
G "github.com/IBM/fp-go/v2/optics/traversal/generic"
)
// Id is the identity constructor of a traversal
func Id[S, A any]() G.Traversal[S, S, A, A] {
func Id[S, A any]() Traversal[S, S, A, A] {
return F.Identity[func(S) A]
}
// Modify applies a transformation function to a traversal
func Modify[S, A any](f func(A) A) func(sa G.Traversal[S, A, S, A]) func(S) S {
return func(sa G.Traversal[S, A, S, A]) func(S) S {
return sa(f)
}
func Modify[S, A any](f Endomorphism[A]) func(Traversal[S, A, S, A]) Endomorphism[S] {
return identity.Flap[Endomorphism[S]](f)
}
// Set sets a constant value for all values of the traversal
func Set[S, A any](a A) func(sa G.Traversal[S, A, S, A]) func(S) S {
func Set[S, A any](a A) func(Traversal[S, A, S, A]) Endomorphism[S] {
return Modify[S](F.Constant1[A](a))
}
// FoldMap maps each target to a `Monoid` and combines the result
func FoldMap[M, S, A any](f func(A) M) func(sa G.Traversal[S, A, C.Const[M, S], C.Const[M, A]]) func(S) M {
return G.FoldMap[M, S](f)
func FoldMap[S, M, A any](f func(A) M) func(sa Traversal[S, A, C.Const[M, S], C.Const[M, A]]) func(S) M {
return G.FoldMap[S](f)
}
// Fold maps each target to a `Monoid` and combines the result
func Fold[S, A any](sa G.Traversal[S, A, C.Const[A, S], C.Const[A, A]]) func(S) A {
func Fold[S, A any](sa Traversal[S, A, C.Const[A, S], C.Const[A, A]]) func(S) A {
return G.Fold(sa)
}
// GetAll gets all the targets of a traversal
func GetAll[S, A any](s S) func(sa G.Traversal[S, A, C.Const[[]A, S], C.Const[[]A, A]]) []A {
func GetAll[A, S any](s S) func(sa Traversal[S, A, C.Const[[]A, S], C.Const[[]A, A]]) []A {
return G.GetAll[[]A](s)
}
// Compose composes two traversables
func Compose[
S, A, B, HKTS, HKTA, HKTB any](ab G.Traversal[A, B, HKTA, HKTB]) func(sa G.Traversal[S, A, HKTS, HKTA]) G.Traversal[S, B, HKTS, HKTB] {
S, HKTS, A, B, HKTA, HKTB any](ab Traversal[A, B, HKTA, HKTB]) func(Traversal[S, A, HKTS, HKTA]) Traversal[S, B, HKTS, HKTB] {
return G.Compose[
G.Traversal[A, B, HKTA, HKTB],
G.Traversal[S, A, HKTS, HKTA],
G.Traversal[S, B, HKTS, HKTB]](ab)
Traversal[A, B, HKTA, HKTB],
Traversal[S, A, HKTS, HKTA],
Traversal[S, B, HKTS, HKTB]](ab)
}
// Filter creates a function that filters the targets of a traversal based on a predicate.
//
// This function allows you to refine a traversal to only focus on values that satisfy
// a given predicate. It works by converting the predicate into a prism, then converting
// that prism into a traversal, and finally composing it with the original traversal.
//
// The filtering is selective: when modifying values through the filtered traversal,
// only values that satisfy the predicate will be transformed. Values that don't
// satisfy the predicate remain unchanged.
//
// Type Parameters:
// - S: The source type
// - A: The focus type (the values being filtered)
// - HKTS: Higher-kinded type for S (functor/applicative context)
// - HKTA: Higher-kinded type for A (functor/applicative context)
//
// Parameters:
// - fof: Function to lift A into the higher-kinded type HKTA (pure/of operation)
// - fmap: Function to map over HKTA (functor map operation)
//
// Returns:
// - A function that takes a predicate and returns an endomorphism on traversals
//
// Example:
//
// import (
// AR "github.com/IBM/fp-go/v2/array"
// F "github.com/IBM/fp-go/v2/function"
// "github.com/IBM/fp-go/v2/identity"
// N "github.com/IBM/fp-go/v2/number"
// AI "github.com/IBM/fp-go/v2/optics/traversal/array/identity"
// )
//
// // Create a traversal for array elements
// arrayTraversal := AI.FromArray[int]()
// baseTraversal := F.Pipe1(
// Id[[]int, []int](),
// Compose[[]int, []int, []int, int](arrayTraversal),
// )
//
// // Filter to only positive numbers
// isPositive := N.MoreThan(0)
// filteredTraversal := F.Pipe1(
// baseTraversal,
// Filter[[]int, int](identity.Of[int], identity.Map[int, int])(isPositive),
// )
//
// // Double only positive numbers
// numbers := []int{-2, -1, 0, 1, 2, 3}
// result := filteredTraversal(func(n int) int { return n * 2 })(numbers)
// // result: [-2, -1, 0, 2, 4, 6]
//
// See Also:
// - prism.FromPredicate: Creates a prism from a predicate
// - prism.AsTraversal: Converts a prism to a traversal
// - Compose: Composes two traversals
func Filter[S, HKTS, A, HKTA any](
	fof pointed.OfType[A, HKTA],
	fmap functor.MapType[A, A, HKTA, HKTA],
) func(Predicate[A]) Endomorphism[Traversal[S, A, HKTS, HKTA]] {
	// Thin facade over the generic implementation: only S and HKTS must
	// be pinned explicitly, the remaining parameters are inferred.
	filter := G.Filter[S, HKTS](fof, fmap)
	return func(p Predicate[A]) Endomorphism[Traversal[S, A, HKTS, HKTA]] {
		return filter(p)
	}
}
+240 -5
View File
@@ -32,14 +32,14 @@ func TestGetAll(t *testing.T) {
as := AR.From(1, 2, 3)
tr := AT.FromArray[[]int, int](AR.Monoid[int]())
tr := AT.FromArray[int](AR.Monoid[int]())
sa := F.Pipe1(
Id[[]int, C.Const[[]int, []int]](),
Compose[[]int, []int, int, C.Const[[]int, []int]](tr),
Compose[[]int, C.Const[[]int, []int], []int, int](tr),
)
getall := GetAll[[]int, int](as)(sa)
getall := GetAll[int](as)(sa)
assert.Equal(t, AR.From(1, 2, 3), getall)
}
@@ -54,7 +54,7 @@ func TestFold(t *testing.T) {
sa := F.Pipe1(
Id[[]int, C.Const[int, []int]](),
Compose[[]int, []int, int, C.Const[int, []int]](tr),
Compose[[]int, C.Const[int, []int], []int, int](tr),
)
folded := Fold(sa)(as)
@@ -70,10 +70,245 @@ func TestTraverse(t *testing.T) {
sa := F.Pipe1(
Id[[]int, []int](),
Compose[[]int, []int, int, []int](tr),
Compose[[]int, []int](tr),
)
res := sa(utils.Double)(as)
assert.Equal(t, AR.From(2, 4, 6), res)
}
// TestFilter_Success verifies the happy path of Filter: composing a filter
// onto an array traversal transforms only the elements that satisfy the
// predicate while leaving all other elements untouched.
func TestFilter_Success(t *testing.T) {
	t.Run("filters and modifies only matching elements", func(t *testing.T) {
		// Arrange
		numbers := []int{-2, -1, 0, 1, 2, 3}
		arrayTraversal := AI.FromArray[int]()
		baseTraversal := F.Pipe1(
			Id[[]int, []int](),
			Compose[[]int, []int](arrayTraversal),
		)
		// Filter to only positive numbers
		isPositive := N.MoreThan(0)
		// Identity functor: both the "of" and "map" operations are
		// plain identities, so no extra computational context applies.
		filteredTraversal := F.Pipe1(
			baseTraversal,
			Filter[[]int, []int](F.Identity[int], F.Identity[func(int) int])(isPositive),
		)
		// Act - double only positive numbers
		result := filteredTraversal(func(n int) int { return n * 2 })(numbers)
		// Assert - negatives and zero pass through unchanged
		assert.Equal(t, []int{-2, -1, 0, 2, 4, 6}, result)
	})
	t.Run("filters even numbers and triples them", func(t *testing.T) {
		// Arrange
		numbers := []int{1, 2, 3, 4, 5, 6}
		arrayTraversal := AI.FromArray[int]()
		baseTraversal := F.Pipe1(
			Id[[]int, []int](),
			Compose[[]int, []int](arrayTraversal),
		)
		// Filter to only even numbers
		isEven := func(n int) bool { return n%2 == 0 }
		filteredTraversal := F.Pipe1(
			baseTraversal,
			Filter[[]int, []int](F.Identity[int], F.Identity[func(int) int])(isEven),
		)
		// Act - triple only the even numbers
		result := filteredTraversal(func(n int) int { return n * 3 })(numbers)
		// Assert - odd positions keep their original values
		assert.Equal(t, []int{1, 6, 3, 12, 5, 18}, result)
	})
	t.Run("filters strings by length", func(t *testing.T) {
		// Arrange
		words := []string{"a", "ab", "abc", "abcd", "abcde"}
		arrayTraversal := AI.FromArray[string]()
		baseTraversal := F.Pipe1(
			Id[[]string, []string](),
			Compose[[]string, []string, []string, string](arrayTraversal),
		)
		// Filter strings with length > 2
		longerThanTwo := func(s string) bool { return len(s) > 2 }
		filteredTraversal := F.Pipe1(
			baseTraversal,
			Filter[[]string, []string, string, string](F.Identity[string], F.Identity[func(string) string])(longerThanTwo),
		)
		// Act - append an exclamation mark to the matching strings
		result := filteredTraversal(func(s string) string {
			return s + "!"
		})(words)
		// Assert - strings of length <= 2 are unchanged
		assert.Equal(t, []string{"a", "ab", "abc!", "abcd!", "abcde!"}, result)
	})
}
// TestFilter_EdgeCases exercises the boundary behavior of Filter: empty
// input, predicates matching none or all of the elements, and single-element
// arrays on both sides of the predicate.
func TestFilter_EdgeCases(t *testing.T) {
	t.Run("empty array returns empty array", func(t *testing.T) {
		// Arrange
		numbers := []int{}
		arrayTraversal := AI.FromArray[int]()
		baseTraversal := F.Pipe1(
			Id[[]int, []int](),
			Compose[[]int, []int](arrayTraversal),
		)
		isPositive := N.MoreThan(0)
		filteredTraversal := F.Pipe1(
			baseTraversal,
			Filter[[]int, []int](F.Identity[int], F.Identity[func(int) int])(isPositive),
		)
		// Act
		result := filteredTraversal(utils.Double)(numbers)
		// Assert - no elements to visit, result stays empty
		assert.Equal(t, []int{}, result)
	})
	t.Run("no elements match predicate", func(t *testing.T) {
		// Arrange
		numbers := []int{-5, -4, -3, -2, -1}
		arrayTraversal := AI.FromArray[int]()
		baseTraversal := F.Pipe1(
			Id[[]int, []int](),
			Compose[[]int, []int](arrayTraversal),
		)
		isPositive := N.MoreThan(0)
		filteredTraversal := F.Pipe1(
			baseTraversal,
			Filter[[]int, []int](F.Identity[int], F.Identity[func(int) int])(isPositive),
		)
		// Act
		result := filteredTraversal(utils.Double)(numbers)
		// Assert - all elements unchanged
		assert.Equal(t, []int{-5, -4, -3, -2, -1}, result)
	})
	t.Run("all elements match predicate", func(t *testing.T) {
		// Arrange
		numbers := []int{1, 2, 3, 4, 5}
		arrayTraversal := AI.FromArray[int]()
		baseTraversal := F.Pipe1(
			Id[[]int, []int](),
			Compose[[]int, []int](arrayTraversal),
		)
		isPositive := N.MoreThan(0)
		filteredTraversal := F.Pipe1(
			baseTraversal,
			Filter[[]int, []int](F.Identity[int], F.Identity[func(int) int])(isPositive),
		)
		// Act
		result := filteredTraversal(utils.Double)(numbers)
		// Assert - all elements doubled
		assert.Equal(t, []int{2, 4, 6, 8, 10}, result)
	})
	t.Run("single element matching", func(t *testing.T) {
		// Arrange
		numbers := []int{42}
		arrayTraversal := AI.FromArray[int]()
		baseTraversal := F.Pipe1(
			Id[[]int, []int](),
			Compose[[]int, []int](arrayTraversal),
		)
		isPositive := N.MoreThan(0)
		filteredTraversal := F.Pipe1(
			baseTraversal,
			Filter[[]int, []int](F.Identity[int], F.Identity[func(int) int])(isPositive),
		)
		// Act
		result := filteredTraversal(utils.Double)(numbers)
		// Assert - the lone matching element is transformed
		assert.Equal(t, []int{84}, result)
	})
	t.Run("single element not matching", func(t *testing.T) {
		// Arrange
		numbers := []int{-42}
		arrayTraversal := AI.FromArray[int]()
		baseTraversal := F.Pipe1(
			Id[[]int, []int](),
			Compose[[]int, []int](arrayTraversal),
		)
		isPositive := N.MoreThan(0)
		filteredTraversal := F.Pipe1(
			baseTraversal,
			Filter[[]int, []int](F.Identity[int], F.Identity[func(int) int])(isPositive),
		)
		// Act
		result := filteredTraversal(utils.Double)(numbers)
		// Assert - the lone non-matching element passes through
		assert.Equal(t, []int{-42}, result)
	})
}
// TestFilter_Integration covers Filter in combination with itself and with
// identity transformations: stacked filters behave like a conjunction of
// predicates, and an identity modification leaves the structure unchanged.
func TestFilter_Integration(t *testing.T) {
	t.Run("multiple filters composed", func(t *testing.T) {
		// Arrange
		numbers := []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
		arrayTraversal := AI.FromArray[int]()
		baseTraversal := F.Pipe1(
			Id[[]int, []int](),
			Compose[[]int, []int](arrayTraversal),
		)
		// Filter to only even numbers, then only those > 4
		isEven := func(n int) bool { return n%2 == 0 }
		greaterThanFour := N.MoreThan(4)
		// Composing two filters narrows the focus to elements that
		// satisfy both predicates (even AND > 4).
		filteredTraversal := F.Pipe2(
			baseTraversal,
			Filter[[]int, []int](F.Identity[int], F.Identity[func(int) int])(isEven),
			Filter[[]int, []int](F.Identity[int], F.Identity[func(int) int])(greaterThanFour),
		)
		// Act - add 100 to matching elements
		result := filteredTraversal(func(n int) int { return n + 100 })(numbers)
		// Assert - only 6, 8, 10 should be modified
		assert.Equal(t, []int{1, 2, 3, 4, 5, 106, 7, 108, 9, 110}, result)
	})
	t.Run("filter with identity transformation", func(t *testing.T) {
		// Arrange
		numbers := []int{1, 2, 3, 4, 5}
		arrayTraversal := AI.FromArray[int]()
		baseTraversal := F.Pipe1(
			Id[[]int, []int](),
			Compose[[]int, []int](arrayTraversal),
		)
		isEven := func(n int) bool { return n%2 == 0 }
		filteredTraversal := F.Pipe1(
			baseTraversal,
			Filter[[]int, []int](F.Identity[int], F.Identity[func(int) int])(isEven),
		)
		// Act - identity transformation
		result := filteredTraversal(F.Identity[int])(numbers)
		// Assert - array unchanged
		assert.Equal(t, []int{1, 2, 3, 4, 5}, result)
	})
}
+15
View File
@@ -0,0 +1,15 @@
package traversal
import (
"github.com/IBM/fp-go/v2/endomorphism"
G "github.com/IBM/fp-go/v2/optics/traversal/generic"
"github.com/IBM/fp-go/v2/predicate"
)
type (
	// Endomorphism is a local alias for [endomorphism.Endomorphism],
	// re-exported so signatures in this package stay concise.
	Endomorphism[A any] = endomorphism.Endomorphism[A]
	// Traversal is a local alias for the generic traversal type
	// [G.Traversal].
	Traversal[S, A, HKTS, HKTA any] = G.Traversal[S, A, HKTS, HKTA]
	// Predicate is a local alias for [predicate.Predicate].
	Predicate[A any] = predicate.Predicate[A]
)