mirror of https://github.com/IBM/fp-go.git synced 2025-12-17 23:37:41 +02:00

Compare commits

...

42 Commits

Author SHA1 Message Date
Dr. Carsten Leue
20398e67a9 fix: better doc and implementation of retry
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-17 15:58:11 +01:00
Dr. Carsten Leue
fceda15701 doc: improve docs
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-17 10:11:58 +01:00
Dr. Carsten Leue
4ebfcadabe fix: add better tests
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-16 14:03:01 +01:00
Dr. Carsten Leue
acb601fc01 fix: reuse some more code
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-15 16:30:40 +01:00
Dr. Carsten Leue
d17663f016 fix: better doc
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-15 11:16:09 +01:00
Dr. Carsten Leue
829365fc24 doc: improve docs
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-12 13:30:10 +01:00
Dr. Carsten Leue
64b5660b4e doc: remove some comments
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-12 12:35:53 +01:00
Dr. Carsten Leue
16e82d6a65 fix: better cancellation support
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-12 11:52:43 +01:00
Dr. Carsten Leue
0d40fdcebb fix: implement tail recursion
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-12 11:18:32 +01:00
Dr. Carsten Leue
6a4dfa2c93 fix: better doc
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-11 16:18:55 +01:00
Dr. Carsten Leue
a37f379a3c fix: semantic of MapTo and ChainTo and update tests
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-11 09:09:44 +01:00
Dr. Carsten Leue
ece0cd135d fix: add more tests and logging
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-10 18:23:19 +01:00
Dr. Carsten Leue
739b6a284c fix: better slog based logging
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-09 17:52:57 +01:00
Dr. Carsten Leue
ba10d8d314 doc: fix docs
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-09 13:00:03 +01:00
Dr. Carsten Leue
3d6c419185 fix: add better logging
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-09 12:49:44 +01:00
Dr. Carsten Leue
3f4b6292e4 fix: optimize Traverse
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-05 21:35:05 +01:00
Dr. Carsten Leue
b1704b6d26 fix: implement TraverseReader
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-05 17:51:13 +01:00
Dr. Carsten Leue
ffdfd218f8 fix: implement Flip for Reader
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-05 11:04:49 +01:00
Dr. Carsten Leue
34826d8c52 fix: Ask and add tests to retry
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-04 16:47:53 +01:00
Dr. Carsten Leue
24c0519cc7 fix: try to unify type signatures
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-04 16:31:21 +01:00
Dr. Carsten Leue
ff48d8953e fix: implement some missing methods in reader io
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-04 13:50:25 +01:00
Dr. Carsten Leue
d739c9b277 fix: add doc to readerio
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-12-03 18:13:59 +01:00
Dr. Carsten Leue
f0054431a5 fix: add logging to readerio
2025-12-03 18:07:06 +01:00
Carsten Leue
1a89ec3df7 fix: implement Sequence for Pair
Signed-off-by: Carsten Leue <carsten.leue@de.ibm.com>
2025-11-28 11:22:23 +01:00
Carsten Leue
f652a94c3a fix: add template based logger
Signed-off-by: Carsten Leue <carsten.leue@de.ibm.com>
2025-11-28 10:11:08 +01:00
Dr. Carsten Leue
774db88ca5 fix: add name to prism
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-27 13:26:36 +01:00
Dr. Carsten Leue
62a3365b20 fix: add conversion prisms for numbers
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-27 13:12:18 +01:00
Dr. Carsten Leue
d9a16a6771 fix: add reduce operations to readerioresult
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-26 17:00:10 +01:00
Dr. Carsten Leue
8949cc7dca fix: expose stats
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-26 13:44:40 +01:00
Dr. Carsten Leue
fa6b6caf22 fix: generic order for reader.Flap
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-26 12:53:13 +01:00
Dr. Carsten Leue
a1e8d397c3 fix: better doc and some helpers
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-26 12:06:09 +01:00
Dr. Carsten Leue
dbe7102e43 fix: better doc and some helpers
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-26 12:05:31 +01:00
Dr. Carsten Leue
09aeb996e2 fix: add GetOrElseOf
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-24 18:57:30 +01:00
Dr. Carsten Leue
7cd575d95a fix: improve Prism and Optional
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-24 18:22:52 +01:00
Dr. Carsten Leue
dcfb023891 fix: improve assertions
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-24 17:28:48 +01:00
Dr. Carsten Leue
51cf241a26 fix: add ReaderK
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-24 12:29:55 +01:00
Dr. Carsten Leue
9004c93976 fix: add some idomatic helpers
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-24 10:40:58 +01:00
Dr. Carsten Leue
d8ab6b0ce5 fix: ChainReaderK
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-22 10:39:56 +01:00
Dr. Carsten Leue
4e9998b645 fix: benchmarks and better docs
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-21 15:39:41 +01:00
Dr. Carsten Leue
2ea9e292e1 fix: idiomatic/readeresult
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-21 15:25:59 +01:00
Dr. Carsten Leue
12a20e30d1 fix: implement BindReaderK
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-21 13:01:27 +01:00
Dr. Carsten Leue
4909ad5473 fix: add missing monoid
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-21 10:22:50 +01:00
406 changed files with 66431 additions and 5339 deletions

v2/DESIGN.md

@@ -0,0 +1,574 @@
# Design Decisions
This document explains the key design decisions and principles behind fp-go's API design.
## Table of Contents
- [Data Last Principle](#data-last-principle)
- [Kleisli and Operator Types](#kleisli-and-operator-types)
- [Monadic Operations Comparison](#monadic-operations-comparison)
- [Type Parameter Ordering](#type-parameter-ordering)
- [Generic Type Aliases](#generic-type-aliases)
## Data Last Principle
fp-go follows the **"data last"** principle, where the data being operated on is always the last parameter in a function. This design choice enables powerful function composition and partial application patterns.
### What is "Data Last"?
In the "data last" style, functions are structured so that:
1. Configuration parameters come first
2. The data to be transformed comes last
This is the opposite of the traditional object-oriented style where the data (receiver) comes first.
### Why "Data Last"?
The "data last" principle enables:
1. **Natural Currying**: Functions can be partially applied to create specialized transformations
2. **Function Composition**: Operations can be composed before applying them to data
3. **Point-Free Style**: Write transformations without explicitly mentioning the data
4. **Reusability**: Create reusable transformation pipelines
### Examples
#### Basic Transformation
```go
// Data last style (fp-go)
double := array.Map(number.Mul(2))
result := double([]int{1, 2, 3}) // [2, 4, 6]
// Compare with data first style (traditional)
result := array.Map([]int{1, 2, 3}, number.Mul(2))
```
#### Function Composition
```go
import (
A "github.com/IBM/fp-go/v2/array"
F "github.com/IBM/fp-go/v2/function"
N "github.com/IBM/fp-go/v2/number"
)
// Create a pipeline of transformations
pipeline := F.Flow3(
A.Filter(func(x int) bool { return x > 0 }), // Keep positive numbers
A.Map(N.Mul(2)), // Double each number
A.Reduce(func(acc, x int) int { return acc + x }, 0), // Sum them up
)
// Apply the pipeline to different data
result1 := pipeline([]int{-1, 2, 3, -4, 5}) // (2 + 3 + 5) * 2 = 20
result2 := pipeline([]int{1, 2, 3}) // (1 + 2 + 3) * 2 = 12
```
#### Partial Application
```go
import (
O "github.com/IBM/fp-go/v2/option"
)
// Create specialized functions by partial application
getOrZero := O.GetOrElse(func() int { return 0 })
getOrEmpty := O.GetOrElse(func() string { return "" })
// Use them with different data
value1 := getOrZero(O.Some(42)) // 42
value2 := getOrZero(O.None[int]()) // 0
text1 := getOrEmpty(O.Some("hello")) // "hello"
text2 := getOrEmpty(O.None[string]()) // ""
```
#### Building Reusable Transformations
```go
import (
"errors"
"strings"
E "github.com/IBM/fp-go/v2/either"
F "github.com/IBM/fp-go/v2/function"
)
// Create a reusable validation pipeline
type User struct {
Name string
Email string
Age int
}
validateAge := E.FromPredicate(
func(u User) bool { return u.Age >= 18 },
func(u User) error { return errors.New("must be 18 or older") },
)
validateEmail := E.FromPredicate(
func(u User) bool { return strings.Contains(u.Email, "@") },
func(u User) error { return errors.New("invalid email") },
)
// Compose validators
validateUser := F.Flow2(
validateAge,
E.Chain(validateEmail),
)
// Apply to different users
result1 := validateUser(User{Name: "Alice", Email: "alice@example.com", Age: 25})
result2 := validateUser(User{Name: "Bob", Email: "invalid", Age: 30})
```
#### Monadic Operations
```go
import (
"strconv"
F "github.com/IBM/fp-go/v2/function"
O "github.com/IBM/fp-go/v2/option"
)
// Data last enables clean monadic chains
parseAndDouble := F.Flow2(
O.FromPredicate(func(s string) bool { return s != "" }),
O.Chain(func(s string) O.Option[int] {
n, err := strconv.Atoi(s)
if err != nil {
return O.None[int]()
}
return O.Some(n * 2)
}),
)
result1 := parseAndDouble("21") // Some(42)
result2 := parseAndDouble("") // None
result3 := parseAndDouble("abc") // None
```
### Monadic vs Non-Monadic Forms
fp-go provides two forms for most operations:
1. **Curried form** (data last): Returns a function that can be composed
2. **Monadic form** (data first): Takes all parameters at once
```go
// Curried form - data last, returns a function
Map[A, B any](f func(A) B) func(Option[A]) Option[B]
// Monadic form - data first, direct execution
MonadMap[A, B any](fa Option[A], f func(A) B) Option[B]
```
**When to use each:**
- **Curried form**: When building pipelines, composing functions, or creating reusable transformations
- **Monadic form**: When you have all parameters available and want direct execution
```go
// Curried form - building a pipeline
transform := F.Flow3(
O.Map(strings.ToUpper),
O.Filter(func(s string) bool { return len(s) > 3 }),
O.GetOrElse(func() string { return "DEFAULT" }),
)
result := transform(O.Some("hello"))
// Monadic form - direct execution
result := O.MonadMap(O.Some("hello"), strings.ToUpper)
```
### Further Reading on Data-Last Pattern
The data-last currying pattern is well-documented in the functional programming community:
- [Mostly Adequate Guide - Ch. 4: Currying](https://mostly-adequate.gitbook.io/mostly-adequate-guide/ch04) - Excellent introduction with clear examples
- [Curry and Function Composition](https://medium.com/javascript-scene/curry-and-function-composition-2c208d774983) by Eric Elliott
- [fp-ts Issue #1238](https://github.com/gcanti/fp-ts/issues/1238) - Real-world examples of data-last refactoring
## Kleisli and Operator Types
fp-go uses consistent type aliases across all monads to make code more recognizable and composable. These types provide a common vocabulary that works across different monadic contexts.
### Type Definitions
```go
// Kleisli arrow - a function that returns a monadic value
type Kleisli[A, B any] = func(A) M[B]
// Operator - a function that transforms a monadic value
type Operator[A, B any] = func(M[A]) M[B]
```
Where `M` represents the specific monad (Option, Either, IO, etc.).
### Why These Types Matter
1. **Consistency**: The same type names appear across all monads
2. **Recognizability**: Experienced functional programmers immediately understand the intent
3. **Composability**: Functions with these types compose naturally
4. **Documentation**: Type signatures clearly communicate the operation's behavior
### Examples Across Monads
#### Option Monad
```go
// option/option.go
type Kleisli[A, B any] = func(A) Option[B]
type Operator[A, B any] = func(Option[A]) Option[B]
// Chain uses Kleisli
func Chain[A, B any](f Kleisli[A, B]) Operator[A, B]
// Map returns an Operator
func Map[A, B any](f func(A) B) Operator[A, B]
```
#### Either Monad
```go
// either/either.go
type Kleisli[E, A, B any] = func(A) Either[E, B]
type Operator[E, A, B any] = func(Either[E, A]) Either[E, B]
// Chain uses Kleisli
func Chain[E, A, B any](f Kleisli[E, A, B]) Operator[E, A, B]
// Map returns an Operator
func Map[E, A, B any](f func(A) B) Operator[E, A, B]
```
#### IO Monad
```go
// io/io.go
type Kleisli[A, B any] = func(A) IO[B]
type Operator[A, B any] = func(IO[A]) IO[B]
// Chain uses Kleisli
func Chain[A, B any](f Kleisli[A, B]) Operator[A, B]
// Map returns an Operator
func Map[A, B any](f func(A) B) Operator[A, B]
```
#### Array (List Monad)
```go
// array/array.go
type Kleisli[A, B any] = func(A) []B
type Operator[A, B any] = func([]A) []B
// Chain uses Kleisli
func Chain[A, B any](f Kleisli[A, B]) Operator[A, B]
// Map returns an Operator
func Map[A, B any](f func(A) B) Operator[A, B]
```
### Pattern Recognition
Once you learn these patterns in one monad, you can apply them to all monads:
```go
// The pattern is always the same, just the monad changes
// Option
validateAge := option.Chain(func(user User) option.Option[User] {
if user.Age >= 18 {
return option.Some(user)
}
return option.None[User]()
})
// Either
validateAge := either.Chain(func(user User) either.Either[error, User] {
if user.Age >= 18 {
return either.Right[error](user)
}
return either.Left[User](errors.New("too young"))
})
// IO
validateAge := io.Chain(func(user User) io.IO[User] {
return io.Of(user) // Always succeeds in IO
})
// Array
validateAge := array.Chain(func(user User) []User {
if user.Age >= 18 {
return []User{user}
}
return []User{} // Empty array = failure
})
```
### Composing Kleisli Arrows
Kleisli arrows compose naturally using monadic composition:
```go
import (
"fmt"
"strconv"
F "github.com/IBM/fp-go/v2/function"
O "github.com/IBM/fp-go/v2/option"
)
// Define Kleisli arrows
parseAge := func(s string) O.Option[int] {
n, err := strconv.Atoi(s)
if err != nil {
return O.None[int]()
}
return O.Some(n)
}
validateAge := func(age int) O.Option[int] {
if age >= 18 {
return O.Some(age)
}
return O.None[int]()
}
formatAge := func(age int) O.Option[string] {
return O.Some(fmt.Sprintf("Age: %d", age))
}
// Compose them using Flow and Chain
pipeline := F.Flow3(
parseAge,
O.Chain(validateAge),
O.Chain(formatAge),
)
result := pipeline("25") // Some("Age: 25")
result := pipeline("15") // None (too young)
result := pipeline("abc") // None (parse error)
```
### Building Reusable Operators
Operators can be created once and reused across your codebase:
```go
import (
"strings"
E "github.com/IBM/fp-go/v2/either"
F "github.com/IBM/fp-go/v2/function"
)
// Create reusable operators
type ValidationError struct {
Field string
Message string
}
// Reusable validation operators
validateNonEmpty := E.Chain(func(s string) E.Either[ValidationError, string] {
if s == "" {
return E.Left[string](ValidationError{
Field: "input",
Message: "cannot be empty",
})
}
return E.Right[ValidationError](s)
})
validateEmail := E.Chain(func(s string) E.Either[ValidationError, string] {
if !strings.Contains(s, "@") {
return E.Left[string](ValidationError{
Field: "email",
Message: "invalid format",
})
}
return E.Right[ValidationError](s)
})
// Compose operators
validateEmailInput := F.Flow2(
validateNonEmpty,
validateEmail,
)
// Use across your application
result1 := validateEmailInput(E.Right[ValidationError]("user@example.com"))
result2 := validateEmailInput(E.Right[ValidationError](""))
result3 := validateEmailInput(E.Right[ValidationError]("invalid"))
```
### Benefits of Consistent Naming
1. **Cross-monad understanding**: Learn once, apply everywhere
2. **Easier refactoring**: Changing monads requires minimal code changes
3. **Better tooling**: IDEs can provide better suggestions
4. **Team communication**: Shared vocabulary across the team
5. **Library integration**: Third-party libraries follow the same patterns
### Identity Monad - The Simplest Case
The Identity monad shows these types in their simplest form:
```go
// identity/doc.go
type Operator[A, B any] = func(A) B
// In Identity, there's no wrapping, so:
// - Kleisli[A, B] is just func(A) B
// - Operator[A, B] is just func(A) B
// They're the same because Identity adds no context
```
This demonstrates that these type aliases represent fundamental functional programming concepts, not just arbitrary naming conventions.
## Monadic Operations Comparison
fp-go's monadic operations are inspired by functional programming languages and libraries. Here's how they compare:
| fp-go | fp-ts | Haskell | Scala | Description |
|-------|-------|---------|-------|-------------|
| `Map` | `map` | `fmap` | `map` | Functor mapping - transforms the value inside a context |
| `Chain` | `chain` | `>>=` (bind) | `flatMap` | Monadic bind - chains computations that return wrapped values |
| `Ap` | `ap` | `<*>` | `ap` | Applicative apply - applies a wrapped function to a wrapped value |
| `Of` | `of` | `return`/`pure` | `pure` | Lifts a pure value into a monadic context |
| `Fold` | `fold` | `either` | `fold` | Eliminates the context by providing handlers for each case |
| `Filter` | `filter` | `mfilter` | `filter` | Keeps values that satisfy a predicate |
| `Flatten` | `flatten` | `join` | `flatten` | Removes one level of nesting |
| `ChainFirst` | `chainFirst` | `>>` (then) | `tap` | Chains for side effects, keeping the original value |
| `Alt` | `alt` | `<\|>` | `orElse` | Provides an alternative value if the first fails |
| `GetOrElse` | `getOrElse` | `fromMaybe` | `getOrElse` | Extracts the value or provides a default |
| `FromPredicate` | `fromPredicate` | `guard` | `filter` | Creates a monadic value based on a predicate |
| `Sequence` | `sequence` | `sequence` | `sequence` | Transforms a collection of effects into an effect of a collection |
| `Traverse` | `traverse` | `traverse` | `traverse` | Maps and sequences in one operation |
| `Reduce` | `reduce` | `foldl` | `foldLeft` | Folds a structure from left to right |
| `ReduceRight` | `reduceRight` | `foldr` | `foldRight` | Folds a structure from right to left |
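For example, several of these operations can be combined in a single `option` pipeline. The sketch below relies only on signatures shown elsewhere in this document (`Some`, `Chain`, `Map`, `GetOrElse`, `F.Pipe3`, `F.Constant`, `N.Mul`); the `parse` helper is ad hoc:
```go
import (
    "strconv"

    F "github.com/IBM/fp-go/v2/function"
    N "github.com/IBM/fp-go/v2/number"
    O "github.com/IBM/fp-go/v2/option"
)

// parse is a Kleisli arrow: parsing may fail, so it returns an Option
parse := func(s string) O.Option[int] {
    n, err := strconv.Atoi(s)
    if err != nil {
        return O.None[int]()
    }
    return O.Some(n)
}

result := F.Pipe3(
    O.Some("21"),               // Of: lift a pure value into the context
    O.Chain(parse),             // Chain: sequence a computation that may fail
    O.Map(N.Mul(2)),            // Map: transform the value inside the context
    O.GetOrElse(F.Constant(0)), // GetOrElse: extract the value or fall back to a default
)
// result == 42
```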
### Key Differences from Other Languages
#### Naming Conventions
- **Go conventions**: fp-go uses PascalCase for exported functions (e.g., `Map`, `Chain`) following Go's naming conventions
- **Type parameters first**: Non-inferrable type parameters come first (e.g., `Ap[B, E, A any]`)
- **Monadic prefix**: Direct execution forms use the `Monad` prefix (e.g., `MonadMap`, `MonadChain`)
#### Type System
```go
// fp-go (explicit type parameters when needed)
result := option.Map(transform)(value)
result := option.Map[string, int](transform)(value) // explicit when inference fails
// Haskell (type inference)
result = fmap transform value
// Scala (type inference with method syntax)
result = value.map(transform)
// fp-ts (TypeScript type inference)
const result = pipe(value, map(transform))
```
#### Currying
```go
// fp-go - explicit currying with data last
double := array.Map(number.Mul(2))
result := double(numbers)
// Haskell - automatic currying
double = fmap (*2)
result = double numbers
// Scala - method syntax
result = numbers.map(_ * 2)
```
## Type Parameter Ordering
fp-go v2 uses a specific ordering for type parameters to maximize type inference:
### Rule: Non-Inferrable Parameters First
Type parameters that **cannot be inferred** from function arguments come first. This allows the Go compiler to infer as many types as possible.
```go
// Ap - B cannot be inferred from arguments, so it comes first
func Ap[B, E, A any](fa Either[E, A]) func(Either[E, func(A) B]) Either[E, B]
// Usage - only B needs to be specified
result := either.Ap[string](value)(funcInEither)
```
### Examples
```go
// Map - all types can be inferred from arguments
func Map[E, A, B any](f func(A) B) func(Either[E, A]) Either[E, B]
// Usage - no type parameters needed
result := either.Map(transform)(value)
// Chain - all types can be inferred
func Chain[E, A, B any](f func(A) Either[E, B]) func(Either[E, A]) Either[E, B]
// Usage - no type parameters needed
result := either.Chain(validator)(value)
// Of - E cannot be inferred, comes first
func Of[E, A any](value A) Either[E, A]
// Usage - only E needs to be specified
result := either.Of[error](42)
```
### Benefits
1. **Less verbose code**: Most operations don't require explicit type parameters
2. **Better IDE support**: Type inference provides better autocomplete
3. **Clearer intent**: Only specify types that can't be inferred
## Generic Type Aliases
fp-go v2 leverages Go 1.24's generic type aliases for cleaner type definitions:
```go
// V2 - using generic type alias (requires Go 1.24+)
type ReaderIOEither[R, E, A any] = RD.Reader[R, IOE.IOEither[E, A]]
// V1 - using type definition (Go 1.18+)
type ReaderIOEither[R, E, A any] RD.Reader[R, IOE.IOEither[E, A]]
```
### Benefits
1. **True aliases**: The type is interchangeable with its definition
2. **No namespace imports needed**: Can use types directly without package prefixes
3. **Simpler codebase**: Eliminates the need for `generic` subpackages
4. **Better composability**: Types compose more naturally
### Migration Pattern
```go
// Define project-wide aliases once
package types
import (
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/result"
"github.com/IBM/fp-go/v2/ioresult"
)
type Option[A any] = option.Option[A]
type Result[A any] = result.Result[A]
type IOResult[A any] = ioresult.IOResult[A]
// Use throughout your codebase
package myapp
import "myproject/types"
func process(input string) types.Result[types.Option[int]] {
// implementation
}
```
---
For more information, see:
- [README.md](./README.md) - Overview and quick start
- [API Documentation](https://pkg.go.dev/github.com/IBM/fp-go/v2) - Complete API reference
- [Samples](./samples/) - Practical examples


@@ -0,0 +1,212 @@
# Example Tests Progress
This document tracks the progress of converting documentation examples into executable example test files.
## Overview
The codebase has 300+ documentation examples across many packages. This document tracks which packages have been completed and which still need work.
## Completed Packages
### Core Packages
- [x] **result** - Created `examples_bind_test.go`, `examples_curry_test.go`, `examples_apply_test.go`
- Files: `bind.go` (10 examples), `curry.go` (5 examples), `apply.go` (2 examples)
- Status: ✅ 17 tests passing
### Utility Packages
- [x] **pair** - Created `examples_test.go`
- Files: `pair.go` (14 examples)
- Status: ✅ 14 tests passing
- [x] **tuple** - Created `examples_test.go`
- Files: `tuple.go` (6 examples)
- Status: ✅ 6 tests passing
### Type Class Packages
- [x] **semigroup** - Created `examples_test.go`
- Files: `semigroup.go` (7 examples)
- Status: ✅ 7 tests passing
### Utility Packages (continued)
- [x] **predicate** - Created `examples_test.go`
- Files: `bool.go` (3 examples), `contramap.go` (1 example)
- Status: ✅ 4 tests passing
### Context Reader Packages
- [x] **idiomatic/context/readerresult** - Created `examples_reader_test.go`, `examples_bind_test.go`
- Files: `reader.go` (8 examples), `bind.go` (14 examples)
- Status: ✅ 22 tests passing
## Summary Statistics
- **Total Example Tests Created**: 74
- **Total Packages Completed**: 7 (result, pair, tuple, semigroup, predicate, idiomatic/context/readerresult)
- **All Tests Status**: ✅ PASSING
### Breakdown by Package
- **result**: 21 tests (bind: 10, curry: 5, apply: 2, array: 4)
- **pair**: 14 tests
- **tuple**: 6 tests
- **semigroup**: 7 tests
- **predicate**: 4 tests
- **idiomatic/context/readerresult**: 22 tests (reader: 8, bind: 14)
## Packages with Existing Examples
These packages already have some example test files:
- result (has `examples_create_test.go`, `examples_extract_test.go`)
- option (has `examples_create_test.go`, `examples_extract_test.go`)
- either (has `examples_create_test.go`, `examples_extract_test.go`)
- ioeither (has `examples_create_test.go`, `examples_do_test.go`, `examples_extract_test.go`)
- ioresult (has `examples_create_test.go`, `examples_do_test.go`, `examples_extract_test.go`)
- lazy (has `example_lazy_test.go`)
- array (has `examples_basic_test.go`, `examples_sort_test.go`, `example_any_test.go`, `example_find_test.go`)
- readerioeither (has `traverse_example_test.go`)
- context/readerioresult (has `flip_example_test.go`)
## Packages Needing Example Tests
### Core Packages (High Priority)
- [ ] **result** - Additional files need examples:
- `apply.go` (2 examples)
- `array.go` (7 examples)
- `core.go` (6 examples)
- `either.go` (26 examples)
- `eq.go` (2 examples)
- `functor.go` (1 example)
- [ ] **option** - Additional files need examples
- [ ] **either** - Additional files need examples
### Reader Packages (High Priority)
- [ ] **reader** - Many examples in:
- `array.go` (12 examples)
- `bind.go` (10 examples)
- `curry.go` (8 examples)
- `flip.go` (2 examples)
- `reader.go` (21 examples)
- [ ] **readeroption** - Examples in:
- `array.go` (3 examples)
- `bind.go` (7 examples)
- `curry.go` (5 examples)
- `flip.go` (2 examples)
- `from.go` (4 examples)
- `reader.go` (18 examples)
- `sequence.go` (4 examples)
- [ ] **readerresult** - Examples in:
- `array.go` (3 examples)
- `bind.go` (24 examples)
- `curry.go` (7 examples)
- `flip.go` (2 examples)
- `from.go` (4 examples)
- `monoid.go` (3 examples)
- [ ] **readereither** - Examples in:
- `array.go` (3 examples)
- `bind.go` (7 examples)
- `flip.go` (3 examples)
- [ ] **readerio** - Examples in:
- `array.go` (3 examples)
- `bind.go` (7 examples)
- `flip.go` (2 examples)
- `logging.go` (4 examples)
- `reader.go` (30 examples)
- [ ] **readerioeither** - Examples in:
- `bind.go` (7 examples)
- `flip.go` (1 example)
- [ ] **readerioresult** - Examples in:
- `array.go` (8 examples)
- `bind.go` (24 examples)
### State Packages
- [ ] **statereaderioeither** - Examples in:
- `bind.go` (5 examples)
- `resource.go` (1 example)
- `state.go` (13 examples)
### Utility Packages
- [ ] **lazy** - Additional examples in:
- `apply.go` (2 examples)
- `bind.go` (7 examples)
- `lazy.go` (10 examples)
- `sequence.go` (4 examples)
- `traverse.go` (2 examples)
- [ ] **pair** - Additional examples in:
- `monad.go` (12 examples)
- `pair.go` (remaining ~20 examples)
- [ ] **tuple** - Examples in:
- `tuple.go` (6 examples)
- [ ] **predicate** - Examples in:
- `bool.go` (3 examples)
- `contramap.go` (1 example)
- `monoid.go` (4 examples)
- [ ] **retry** - Examples in:
- `retry.go` (7 examples)
- [ ] **logging** - Examples in:
- `logger.go` (5 examples)
### Collection Packages
- [ ] **record** - Examples in:
- `bind.go` (3 examples)
### Type Class Packages
- [ ] **semigroup** - Examples in:
- `alt.go` (1 example)
- `apply.go` (1 example)
- `array.go` (4 examples)
- `semigroup.go` (7 examples)
- [ ] **ord** - Examples in:
- `ord.go` (1 example)
## Strategy for Completion
1. **Prioritize by usage**: Focus on core packages (result, option, either) first
2. **Group by package**: Complete all examples for one package before moving to next
3. **Test incrementally**: Run tests after each file to catch errors early
4. **Follow patterns**: Use existing example test files as templates
5. **Document as you go**: Update this file with progress
## Example Test File Template
```go
// Copyright header...
package packagename_test
import (
"fmt"
PKG "github.com/IBM/fp-go/v2/packagename"
)
func ExampleFunctionName() {
// Copy example from doc comment
// Ensure it compiles and produces correct output
fmt.Println(result)
// Output:
// expected output
}
```
## Notes
- Use `F.Constant1[error](defaultValue)` for GetOrElse in result package
- Use `F.Pipe1` instead of `F.Pipe2` when only one transformation
- Check function signatures carefully for type parameters
- Some functions like `BiMap` are capitalized differently than in docs
- **Prefer `R.Eitherize1(func)` over manual error handling** - converts `func(T) (R, error)` to `func(T) Result[R]`
- Example: Use `R.Eitherize1(strconv.Atoi)` instead of manual if/else error checking (see the sketch after this list)
- **Add Go documentation comments to all example functions** - Each example should have a comment explaining what it demonstrates
- **Idiomatic vs Non-Idiomatic packages**:
- Non-idiomatic (e.g., `result`): Uses `Result[A]` type (Either monad)
- Idiomatic (e.g., `idiomatic/result`): Uses `(A, error)` tuples (Go-style)
- Context readers use non-idiomatic `Result[A]` internally
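As a concrete illustration of the `Eitherize1` note above, here is a minimal example-test sketch. It assumes that `result.Eitherize1` converts `func(string) (int, error)` into `func(string) Result[int]` and that `GetOrElse` takes an `onLeft` handler, as described in these notes; the example name is a placeholder.
```go
package result_test

import (
    "fmt"
    "strconv"

    F "github.com/IBM/fp-go/v2/function"
    R "github.com/IBM/fp-go/v2/result"
)

// ExampleEitherize1_atoi wraps a Go (value, error) function with Eitherize1
// instead of writing manual if/else error handling.
func ExampleEitherize1_atoi() {
    // parse has type func(string) R.Result[int]
    parse := R.Eitherize1(strconv.Atoi)

    value := F.Pipe2(
        "42",
        parse,
        R.GetOrElse(F.Constant1[error](0)),
    )
    fmt.Println(value)

    // Output:
    // 42
}
```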


@@ -314,7 +314,7 @@ if err != nil {
```go
// Map transforms the success value
double := result.Map(func(x int) int { return x * 2 })
double := result.Map(N.Mul(2))
result := double(result.Right[error](21)) // Right(42)
// Chain sequences operations
@@ -330,7 +330,7 @@ validate := result.Chain(func(x int) result.Result[int] {
```go
// Map transforms the success value
double := result.Map(func(x int) int { return x * 2 })
double := result.Map(N.Mul(2))
value, err := double(21, nil) // (42, nil)
// Chain sequences operations


@@ -0,0 +1,174 @@
# Idiomatic ReaderIOResult Functions - Implementation Plan
## Overview
This document outlines the idiomatic functions that should be added to the `readerioresult` package to support Go's native `(value, error)` pattern, similar to what was implemented for `readerresult`.
## Key Concepts
The idiomatic package `github.com/IBM/fp-go/v2/idiomatic/readerioresult` defines:
- `ReaderIOResult[R, A]` as `func(R) func() (A, error)` (idiomatic style)
- This contrasts with `readerioresult.ReaderIOResult[R, A]`, which is `Reader[R, IOResult[A]]` (functional style); the sketch below makes the contrast explicit
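Written out as type aliases in the style of `DESIGN.md`, the two shapes are (illustrative only; `Reader` and `IOResult` refer to the v2 `reader` and `ioresult` packages):
```go
// Idiomatic style (github.com/IBM/fp-go/v2/idiomatic/readerioresult):
// the environment yields a Go-style thunk returning (value, error)
type ReaderIOResult[R, A any] = func(R) func() (A, error)

// Functional style (github.com/IBM/fp-go/v2/readerioresult):
// a Reader of an IOResult
type ReaderIOResult[R, A any] = Reader[R, IOResult[A]]
```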
## Functions to Add
### In `readerioresult/reader.go`
Add helper functions at the top:
```go
func fromReaderIOResultKleisliI[R, A, B any](f RIORI.Kleisli[R, A, B]) Kleisli[R, A, B] {
return function.Flow2(f, FromReaderIOResultI[R, B])
}
func fromIOResultKleisliI[A, B any](f IORI.Kleisli[A, B]) ioresult.Kleisli[A, B] {
return ioresult.Eitherize1(f)
}
```
### Core Conversion Functions
1. **FromResultI** - Lift `(value, error)` to ReaderIOResult
```go
func FromResultI[R, A any](a A, err error) ReaderIOResult[R, A]
```
2. **FromIOResultI** - Lift idiomatic IOResult to functional
```go
func FromIOResultI[R, A any](ioe func() (A, error)) ReaderIOResult[R, A]
```
3. **FromReaderIOResultI** - Convert idiomatic ReaderIOResult to functional
```go
func FromReaderIOResultI[R, A any](rr RIORI.ReaderIOResult[R, A]) ReaderIOResult[R, A]
```
### Chain Functions
4. **MonadChainI** / **ChainI** - Chain with idiomatic Kleisli
```go
func MonadChainI[R, A, B any](ma ReaderIOResult[R, A], f RIORI.Kleisli[R, A, B]) ReaderIOResult[R, B]
func ChainI[R, A, B any](f RIORI.Kleisli[R, A, B]) Operator[R, A, B]
```
5. **MonadChainEitherIK** / **ChainEitherIK** - Chain with idiomatic Result functions
```go
func MonadChainEitherIK[R, A, B any](ma ReaderIOResult[R, A], f func(A) (B, error)) ReaderIOResult[R, B]
func ChainEitherIK[R, A, B any](f func(A) (B, error)) Operator[R, A, B]
```
6. **MonadChainIOResultIK** / **ChainIOResultIK** - Chain with idiomatic IOResult
```go
func MonadChainIOResultIK[R, A, B any](ma ReaderIOResult[R, A], f func(A) func() (B, error)) ReaderIOResult[R, B]
func ChainIOResultIK[R, A, B any](f func(A) func() (B, error)) Operator[R, A, B]
```
### Applicative Functions
7. **MonadApI** / **ApI** - Apply with idiomatic value
```go
func MonadApI[B, R, A any](fab ReaderIOResult[R, func(A) B], fa RIORI.ReaderIOResult[R, A]) ReaderIOResult[R, B]
func ApI[B, R, A any](fa RIORI.ReaderIOResult[R, A]) Operator[R, func(A) B, B]
```
### Error Handling Functions
8. **OrElseI** - Fallback with idiomatic computation
```go
func OrElseI[R, A any](onLeft RIORI.Kleisli[R, error, A]) Operator[R, A, A]
```
9. **MonadAltI** / **AltI** - Alternative with idiomatic computation
```go
func MonadAltI[R, A any](first ReaderIOResult[R, A], second Lazy[RIORI.ReaderIOResult[R, A]]) ReaderIOResult[R, A]
func AltI[R, A any](second Lazy[RIORI.ReaderIOResult[R, A]]) Operator[R, A, A]
```
### Flatten Functions
10. **FlattenI** - Flatten nested idiomatic ReaderIOResult
```go
func FlattenI[R, A any](mma ReaderIOResult[R, RIORI.ReaderIOResult[R, A]]) ReaderIOResult[R, A]
```
### In `readerioresult/bind.go`
11. **BindI** - Bind with idiomatic Kleisli
```go
func BindI[R, S1, S2, T any](setter func(T) func(S1) S2, f RIORI.Kleisli[R, S1, T]) Operator[R, S1, S2]
```
12. **ApIS** - Apply idiomatic value to state
```go
func ApIS[R, S1, S2, T any](setter func(T) func(S1) S2, fa RIORI.ReaderIOResult[R, T]) Operator[R, S1, S2]
```
13. **ApISL** - Apply idiomatic value using lens
```go
func ApISL[R, S, T any](lens L.Lens[S, T], fa RIORI.ReaderIOResult[R, T]) Operator[R, S, S]
```
14. **BindIL** - Bind idiomatic with lens
```go
func BindIL[R, S, T any](lens L.Lens[S, T], f RIORI.Kleisli[R, T, T]) Operator[R, S, S]
```
15. **BindEitherIK** / **BindResultIK** - Bind idiomatic Result
```go
func BindEitherIK[R, S1, S2, T any](setter func(T) func(S1) S2, f func(S1) (T, error)) Operator[R, S1, S2]
func BindResultIK[R, S1, S2, T any](setter func(T) func(S1) S2, f func(S1) (T, error)) Operator[R, S1, S2]
```
16. **BindIOResultIK** - Bind idiomatic IOResult
```go
func BindIOResultIK[R, S1, S2, T any](setter func(T) func(S1) S2, f func(S1) func() (T, error)) Operator[R, S1, S2]
```
17. **BindToEitherI** / **BindToResultI** - Initialize from idiomatic pair
```go
func BindToEitherI[R, S1, T any](setter func(T) S1) func(T, error) ReaderIOResult[R, S1]
func BindToResultI[R, S1, T any](setter func(T) S1) func(T, error) ReaderIOResult[R, S1]
```
18. **BindToIOResultI** - Initialize from idiomatic IOResult
```go
func BindToIOResultI[R, S1, T any](setter func(T) S1) func(func() (T, error)) ReaderIOResult[R, S1]
```
19. **ApEitherIS** / **ApResultIS** - Apply idiomatic pair to state
```go
func ApEitherIS[R, S1, S2, T any](setter func(T) func(S1) S2) func(T, error) Operator[R, S1, S2]
func ApResultIS[R, S1, S2, T any](setter func(T) func(S1) S2) func(T, error) Operator[R, S1, S2]
```
20. **ApIOResultIS** - Apply idiomatic IOResult to state
```go
func ApIOResultIS[R, S1, S2, T any](setter func(T) func(S1) S2, fa func() (T, error)) Operator[R, S1, S2]
```
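To make the proposed signatures concrete, here is a hypothetical usage sketch. None of these helpers exist yet; the sketch assumes `FromResultI` and `ChainEitherIK` behave exactly as specified above, `F` refers to `github.com/IBM/fp-go/v2/function`, and `Env`/`ParsePort` are made-up names.
```go
// Hypothetical sketch only - the helpers used here are the ones proposed above.
type Env struct{ Name string }

// ParsePort lifts a raw (value, error) pair and chains a plain Go parser,
// yielding a functional-style ReaderIOResult[Env, int] without manual error plumbing.
func ParsePort(raw string) ReaderIOResult[Env, int] {
    return F.Pipe1(
        FromResultI[Env](raw, nil),       // proposed: lift a (value, error) pair
        ChainEitherIK[Env](strconv.Atoi), // proposed: chain a func(string) (int, error)
    )
}
```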
## Testing Strategy
Create `readerioresult/idiomatic_test.go` with:
- Tests for each idiomatic function
- Success and error cases
- Integration tests showing real-world usage patterns
- Parallel execution tests where applicable
- Complex scenarios combining multiple idiomatic functions
## Implementation Priority
1. **High Priority** - Core conversion and chain functions (1-6)
2. **Medium Priority** - Bind functions for do-notation (11-16)
3. **Low Priority** - Advanced applicative and error handling (7-10, 17-20)
## Benefits
1. **Seamless Integration** - Mix Go idiomatic code with functional pipelines
2. **Gradual Adoption** - Convert code incrementally from idiomatic to functional
3. **Interoperability** - Work with existing Go libraries that return `(value, error)`
4. **Consistency** - Mirrors the successful pattern from `readerresult`
## References
- See `readerresult` package for similar implementations
- See `idiomatic/readerresult` for the idiomatic types
- See `idiomatic/ioresult` for IO-level idiomatic patterns


@@ -61,6 +61,7 @@ package main
import (
"fmt"
"github.com/IBM/fp-go/v2/option"
N "github.com/IBM/fp-go/v2/number"
)
func main() {
@@ -145,6 +146,8 @@ func main() {
}
```
## ⚠️ Breaking Changes
### From V1 to V2
#### 1. Generic Type Aliases
@@ -205,7 +208,7 @@ The `Compose` function for endomorphisms now follows **mathematical function com
```go
// Compose executed left-to-right
double := N.Mul(2)
increment := func(x int) int { return x + 1 }
increment := N.Add(1)
composed := Compose(double, increment)
result := composed(5) // (5 * 2) + 1 = 11
```
@@ -214,7 +217,7 @@ result := composed(5) // (5 * 2) + 1 = 11
```go
// Compose executes RIGHT-TO-LEFT (mathematical composition)
double := N.Mul(2)
increment := func(x int) int { return x + 1 }
increment := N.Add(1)
composed := Compose(double, increment)
result := composed(5) // (5 + 1) * 2 = 12


@@ -536,3 +536,89 @@ func Flap[B, A any](a A) Operator[func(A) B, B] {
func Prepend[A any](head A) Operator[A, A] {
return G.Prepend[Operator[A, A]](head)
}
// Reverse returns a new slice with elements in reverse order.
// This function creates a new slice containing all elements from the input slice
// in reverse order, without modifying the original slice.
//
// Type Parameters:
// - A: The type of elements in the slice
//
// Parameters:
// - as: The input slice to reverse
//
// Returns:
// - A new slice with elements in reverse order
//
// Behavior:
// - Creates a new slice with the same length as the input
// - Copies elements from the input slice in reverse order
// - Does not modify the original slice
// - Returns an empty slice if the input is empty
// - Returns a single-element slice unchanged if input has one element
//
// Example:
//
// numbers := []int{1, 2, 3, 4, 5}
// reversed := array.Reverse(numbers)
// // reversed: []int{5, 4, 3, 2, 1}
// // numbers: []int{1, 2, 3, 4, 5} (unchanged)
//
// Example with strings:
//
// words := []string{"hello", "world", "foo", "bar"}
// reversed := array.Reverse(words)
// // reversed: []string{"bar", "foo", "world", "hello"}
//
// Example with empty slice:
//
// empty := []int{}
// reversed := array.Reverse(empty)
// // reversed: []int{} (empty slice)
//
// Example with single element:
//
// single := []string{"only"}
// reversed := array.Reverse(single)
// // reversed: []string{"only"}
//
// Use cases:
// - Reversing the order of elements for display or processing
// - Implementing stack-like behavior (LIFO)
// - Processing data in reverse chronological order
// - Reversing transformation pipelines
// - Creating palindrome checks
// - Implementing undo/redo functionality
//
// Example with processing in reverse:
//
// events := []string{"start", "middle", "end"}
// reversed := array.Reverse(events)
// // Process events in reverse order
// for _, event := range reversed {
// fmt.Println(event) // Prints: "end", "middle", "start"
// }
//
// Example with functional composition:
//
// numbers := []int{1, 2, 3, 4, 5}
// result := F.Pipe2(
// numbers,
// array.Map(N.Mul(2)),
// array.Reverse,
// )
// // result: []int{10, 8, 6, 4, 2}
//
// Performance:
// - Time complexity: O(n) where n is the length of the slice
// - Space complexity: O(n) for the new slice
// - Does not allocate if the input slice is empty
//
// Note: This function is immutable - it does not modify the original slice.
// If you need to reverse a slice in place, the standard library's
// slices.Reverse (Go 1.21+) mutates the slice directly.
//
//go:inline
func Reverse[A any](as []A) []A {
return G.Reverse(as)
}


@@ -35,7 +35,7 @@ func TestReplicate(t *testing.T) {
func TestMonadMap(t *testing.T) {
src := []int{1, 2, 3}
result := MonadMap(src, func(x int) int { return x * 2 })
result := MonadMap(src, N.Mul(2))
assert.Equal(t, []int{2, 4, 6}, result)
}
@@ -173,8 +173,8 @@ func TestChain(t *testing.T) {
func TestMonadAp(t *testing.T) {
fns := []func(int) int{
func(x int) int { return x * 2 },
func(x int) int { return x + 10 },
N.Mul(2),
N.Add(10),
}
values := []int{1, 2}
result := MonadAp(fns, values)
@@ -268,7 +268,7 @@ func TestCopy(t *testing.T) {
func TestClone(t *testing.T) {
src := []int{1, 2, 3}
cloner := Clone(func(x int) int { return x * 2 })
cloner := Clone(N.Mul(2))
result := cloner(src)
assert.Equal(t, []int{2, 4, 6}, result)
}


@@ -22,6 +22,7 @@ import (
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/utils"
N "github.com/IBM/fp-go/v2/number"
O "github.com/IBM/fp-go/v2/option"
S "github.com/IBM/fp-go/v2/string"
T "github.com/IBM/fp-go/v2/tuple"
@@ -214,3 +215,262 @@ func ExampleFoldMap() {
// Output: ABC
}
// TestReverse tests the Reverse function
func TestReverse(t *testing.T) {
t.Run("Reverse integers", func(t *testing.T) {
input := []int{1, 2, 3, 4, 5}
result := Reverse(input)
expected := []int{5, 4, 3, 2, 1}
assert.Equal(t, expected, result)
})
t.Run("Reverse strings", func(t *testing.T) {
input := []string{"hello", "world", "foo", "bar"}
result := Reverse(input)
expected := []string{"bar", "foo", "world", "hello"}
assert.Equal(t, expected, result)
})
t.Run("Reverse empty slice", func(t *testing.T) {
input := []int{}
result := Reverse(input)
assert.Equal(t, []int{}, result)
})
t.Run("Reverse single element", func(t *testing.T) {
input := []string{"only"}
result := Reverse(input)
assert.Equal(t, []string{"only"}, result)
})
t.Run("Reverse two elements", func(t *testing.T) {
input := []int{1, 2}
result := Reverse(input)
assert.Equal(t, []int{2, 1}, result)
})
t.Run("Does not modify original slice", func(t *testing.T) {
original := []int{1, 2, 3, 4, 5}
originalCopy := []int{1, 2, 3, 4, 5}
_ = Reverse(original)
assert.Equal(t, originalCopy, original)
})
t.Run("Reverse with floats", func(t *testing.T) {
input := []float64{1.1, 2.2, 3.3}
result := Reverse(input)
expected := []float64{3.3, 2.2, 1.1}
assert.Equal(t, expected, result)
})
t.Run("Reverse with structs", func(t *testing.T) {
type Person struct {
Name string
Age int
}
input := []Person{
{"Alice", 30},
{"Bob", 25},
{"Charlie", 35},
}
result := Reverse(input)
expected := []Person{
{"Charlie", 35},
{"Bob", 25},
{"Alice", 30},
}
assert.Equal(t, expected, result)
})
t.Run("Reverse with pointers", func(t *testing.T) {
a, b, c := 1, 2, 3
input := []*int{&a, &b, &c}
result := Reverse(input)
assert.Equal(t, []*int{&c, &b, &a}, result)
})
t.Run("Double reverse returns original order", func(t *testing.T) {
original := []int{1, 2, 3, 4, 5}
reversed := Reverse(original)
doubleReversed := Reverse(reversed)
assert.Equal(t, original, doubleReversed)
})
t.Run("Reverse with large slice", func(t *testing.T) {
input := MakeBy(1000, F.Identity[int])
result := Reverse(input)
// Check first and last elements
assert.Equal(t, 999, result[0])
assert.Equal(t, 0, result[999])
// Check length
assert.Equal(t, 1000, len(result))
})
t.Run("Reverse palindrome", func(t *testing.T) {
input := []int{1, 2, 3, 2, 1}
result := Reverse(input)
assert.Equal(t, input, result)
})
}
// TestReverseComposition tests Reverse with other array operations
func TestReverseComposition(t *testing.T) {
t.Run("Reverse after Map", func(t *testing.T) {
input := []int{1, 2, 3, 4, 5}
result := F.Pipe2(
input,
Map(N.Mul(2)),
Reverse[int],
)
expected := []int{10, 8, 6, 4, 2}
assert.Equal(t, expected, result)
})
t.Run("Map after Reverse", func(t *testing.T) {
input := []int{1, 2, 3, 4, 5}
result := F.Pipe2(
input,
Reverse[int],
Map(N.Mul(2)),
)
expected := []int{10, 8, 6, 4, 2}
assert.Equal(t, expected, result)
})
t.Run("Reverse with Filter", func(t *testing.T) {
input := []int{1, 2, 3, 4, 5, 6}
result := F.Pipe2(
input,
Filter(func(n int) bool { return n%2 == 0 }),
Reverse[int],
)
expected := []int{6, 4, 2}
assert.Equal(t, expected, result)
})
t.Run("Reverse with Reduce", func(t *testing.T) {
input := []string{"a", "b", "c"}
reversed := Reverse(input)
result := Reduce(func(acc, val string) string {
return acc + val
}, "")(reversed)
assert.Equal(t, "cba", result)
})
t.Run("Reverse with Flatten", func(t *testing.T) {
input := [][]int{{1, 2}, {3, 4}, {5, 6}}
result := F.Pipe2(
input,
Reverse[[]int],
Flatten[int],
)
expected := []int{5, 6, 3, 4, 1, 2}
assert.Equal(t, expected, result)
})
}
// TestReverseUseCases demonstrates practical use cases for Reverse
func TestReverseUseCases(t *testing.T) {
t.Run("Process events in reverse chronological order", func(t *testing.T) {
events := []string{"2024-01-01", "2024-01-02", "2024-01-03"}
reversed := Reverse(events)
// Most recent first
assert.Equal(t, "2024-01-03", reversed[0])
assert.Equal(t, "2024-01-01", reversed[2])
})
t.Run("Implement stack behavior (LIFO)", func(t *testing.T) {
stack := []int{1, 2, 3, 4, 5}
reversed := Reverse(stack)
// Pop from reversed (LIFO)
assert.Equal(t, 5, reversed[0])
assert.Equal(t, 4, reversed[1])
})
t.Run("Reverse string characters", func(t *testing.T) {
chars := []rune("hello")
reversed := Reverse(chars)
result := string(reversed)
assert.Equal(t, "olleh", result)
})
t.Run("Check palindrome", func(t *testing.T) {
word := []rune("racecar")
reversed := Reverse(word)
assert.Equal(t, word, reversed)
notPalindrome := []rune("hello")
reversedNot := Reverse(notPalindrome)
assert.NotEqual(t, notPalindrome, reversedNot)
})
t.Run("Reverse transformation pipeline", func(t *testing.T) {
// Apply transformations in reverse order
numbers := []int{1, 2, 3}
// Normal: add 10, then multiply by 2
normal := F.Pipe2(
numbers,
Map(N.Add(10)),
Map(N.Mul(2)),
)
// Reversed order of operations
reversed := F.Pipe2(
numbers,
Map(N.Mul(2)),
Map(N.Add(10)),
)
assert.NotEqual(t, normal, reversed)
assert.Equal(t, []int{22, 24, 26}, normal)
assert.Equal(t, []int{12, 14, 16}, reversed)
})
}
// TestReverseProperties tests mathematical properties of Reverse
func TestReverseProperties(t *testing.T) {
t.Run("Involution property: Reverse(Reverse(x)) == x", func(t *testing.T) {
testCases := [][]int{
{1, 2, 3, 4, 5},
{1},
{},
{1, 2},
{5, 4, 3, 2, 1},
}
for _, original := range testCases {
result := Reverse(Reverse(original))
assert.Equal(t, original, result)
}
})
t.Run("Length preservation: len(Reverse(x)) == len(x)", func(t *testing.T) {
testCases := [][]int{
{1, 2, 3, 4, 5},
{1},
{},
MakeBy(100, F.Identity[int]),
}
for _, input := range testCases {
result := Reverse(input)
assert.Equal(t, len(input), len(result))
}
})
t.Run("First element becomes last", func(t *testing.T) {
input := []int{1, 2, 3, 4, 5}
result := Reverse(input)
if len(input) > 0 {
assert.Equal(t, input[0], result[len(result)-1])
assert.Equal(t, input[len(input)-1], result[0])
}
})
}


@@ -19,7 +19,7 @@ import (
E "github.com/IBM/fp-go/v2/eq"
)
func equals[T any](left []T, right []T, eq func(T, T) bool) bool {
func equals[T any](left, right []T, eq func(T, T) bool) bool {
if len(left) != len(right) {
return false
}


@@ -140,22 +140,27 @@ func Empty[GA ~[]A, A any]() GA {
return array.Empty[GA]()
}
//go:inline
func UpsertAt[GA ~[]A, A any](a A) func(GA) GA {
return array.UpsertAt[GA](a)
}
//go:inline
func MonadMap[GA ~[]A, GB ~[]B, A, B any](as GA, f func(a A) B) GB {
return array.MonadMap[GA, GB](as, f)
}
//go:inline
func Map[GA ~[]A, GB ~[]B, A, B any](f func(a A) B) func(GA) GB {
return array.Map[GA, GB](f)
}
//go:inline
func MonadMapWithIndex[GA ~[]A, GB ~[]B, A, B any](as GA, f func(int, A) B) GB {
return array.MonadMapWithIndex[GA, GB](as, f)
}
//go:inline
func MapWithIndex[GA ~[]A, GB ~[]B, A, B any](f func(int, A) B) func(GA) GB {
return F.Bind2nd(MonadMapWithIndex[GA, GB, A, B], f)
}
@@ -297,7 +302,7 @@ func MatchLeft[AS ~[]A, A, B any](onEmpty func() B, onNonEmpty func(A, AS) B) fu
}
//go:inline
func Slice[AS ~[]A, A any](start int, end int) func(AS) AS {
func Slice[AS ~[]A, A any](start, end int) func(AS) AS {
return array.Slice[AS](start, end)
}
@@ -361,6 +366,12 @@ func Flap[FAB ~func(A) B, GFAB ~[]FAB, GB ~[]B, A, B any](a A) func(GFAB) GB {
return FC.Flap(Map[GFAB, GB], a)
}
//go:inline
func Prepend[ENDO ~func(AS) AS, AS []A, A any](head A) ENDO {
return array.Prepend[ENDO](head)
}
//go:inline
func Reverse[GT ~[]T, T any](as GT) GT {
return array.Reverse(as)
}


@@ -18,14 +18,11 @@ package nonempty
import (
G "github.com/IBM/fp-go/v2/array/generic"
EM "github.com/IBM/fp-go/v2/endomorphism"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/array"
"github.com/IBM/fp-go/v2/option"
S "github.com/IBM/fp-go/v2/semigroup"
)
// NonEmptyArray represents an array with at least one element
type NonEmptyArray[A any] []A
// Of constructs a single element array
func Of[A any](first A) NonEmptyArray[A] {
return G.Of[NonEmptyArray[A]](first)
@@ -44,20 +41,24 @@ func From[A any](first A, data ...A) NonEmptyArray[A] {
return buffer
}
//go:inline
func IsEmpty[A any](_ NonEmptyArray[A]) bool {
return false
}
//go:inline
func IsNonEmpty[A any](_ NonEmptyArray[A]) bool {
return true
}
//go:inline
func MonadMap[A, B any](as NonEmptyArray[A], f func(a A) B) NonEmptyArray[B] {
return G.MonadMap[NonEmptyArray[A], NonEmptyArray[B]](as, f)
}
func Map[A, B any](f func(a A) B) func(NonEmptyArray[A]) NonEmptyArray[B] {
return F.Bind2nd(MonadMap[A, B], f)
//go:inline
func Map[A, B any](f func(a A) B) Operator[A, B] {
return G.Map[NonEmptyArray[A], NonEmptyArray[B]](f)
}
func Reduce[A, B any](f func(B, A) B, initial B) func(NonEmptyArray[A]) B {
@@ -72,22 +73,27 @@ func ReduceRight[A, B any](f func(A, B) B, initial B) func(NonEmptyArray[A]) B {
}
}
//go:inline
func Tail[A any](as NonEmptyArray[A]) []A {
return as[1:]
}
//go:inline
func Head[A any](as NonEmptyArray[A]) A {
return as[0]
}
//go:inline
func First[A any](as NonEmptyArray[A]) A {
return as[0]
}
//go:inline
func Last[A any](as NonEmptyArray[A]) A {
return as[len(as)-1]
}
//go:inline
func Size[A any](as NonEmptyArray[A]) int {
return G.Size(as)
}
@@ -96,11 +102,11 @@ func Flatten[A any](mma NonEmptyArray[NonEmptyArray[A]]) NonEmptyArray[A] {
return G.Flatten(mma)
}
func MonadChain[A, B any](fa NonEmptyArray[A], f func(a A) NonEmptyArray[B]) NonEmptyArray[B] {
func MonadChain[A, B any](fa NonEmptyArray[A], f Kleisli[A, B]) NonEmptyArray[B] {
return G.MonadChain(fa, f)
}
func Chain[A, B any](f func(A) NonEmptyArray[B]) func(NonEmptyArray[A]) NonEmptyArray[B] {
func Chain[A, B any](f func(A) NonEmptyArray[B]) Operator[A, B] {
return G.Chain[NonEmptyArray[A]](f)
}
@@ -134,3 +140,89 @@ func Fold[A any](s S.Semigroup[A]) func(NonEmptyArray[A]) A {
func Prepend[A any](head A) EM.Endomorphism[NonEmptyArray[A]] {
return array.Prepend[EM.Endomorphism[NonEmptyArray[A]]](head)
}
// ToNonEmptyArray attempts to convert a regular slice into a NonEmptyArray.
// This function provides a safe way to create a NonEmptyArray from a slice that might be empty,
// returning an Option type to handle the case where the input slice is empty.
//
// Type Parameters:
// - A: The element type of the array
//
// Parameters:
// - as: A regular slice that may or may not be empty
//
// Returns:
// - Option[NonEmptyArray[A]]: Some(NonEmptyArray) if the input slice is non-empty, None if empty
//
// Behavior:
// - If the input slice is empty, returns None
// - If the input slice has at least one element, wraps it in Some and returns it as a NonEmptyArray
// - The conversion is a type cast, so no data is copied
//
// Example:
//
// // Convert non-empty slice
// numbers := []int{1, 2, 3}
// result := ToNonEmptyArray(numbers) // Some(NonEmptyArray[1, 2, 3])
//
// // Convert empty slice
// empty := []int{}
// result := ToNonEmptyArray(empty) // None
//
// // Use with Option methods
// numbers := []int{1, 2, 3}
// result := ToNonEmptyArray(numbers)
// if O.IsSome(result) {
// nea := O.GetOrElse(F.Constant(From(0)))(result)
// head := Head(nea) // 1
// }
//
// Use cases:
// - Safely converting user input or external data to NonEmptyArray
// - Validating that a collection has at least one element before processing
// - Converting results from functions that return regular slices
// - Ensuring type safety when working with collections that must not be empty
//
// Example with validation:
//
// func processItems(items []string) Option[string] {
// return F.Pipe2(
// items,
// ToNonEmptyArray[string],
// O.Map(func(nea NonEmptyArray[string]) string {
// return Head(nea) // Safe to get head since we know it's non-empty
// }),
// )
// }
//
// Example with error handling:
//
// items := []int{1, 2, 3}
// result := ToNonEmptyArray(items)
// switch {
// case O.IsSome(result):
// nea := O.GetOrElse(F.Constant(From(0)))(result)
// fmt.Println("First item:", Head(nea))
// case O.IsNone(result):
// fmt.Println("Array is empty")
// }
//
// Example with chaining:
//
// // Process only if non-empty
// result := F.Pipe3(
// []int{1, 2, 3},
// ToNonEmptyArray[int],
// O.Map(Map(func(x int) int { return x * 2 })),
// O.Map(Head[int]),
// ) // Some(2)
//
// Note: This function is particularly useful when working with APIs or functions
// that return regular slices but you need the type-level guarantee that the
// collection is non-empty for subsequent operations.
func ToNonEmptyArray[A any](as []A) Option[NonEmptyArray[A]] {
if G.IsEmpty(as) {
return option.None[NonEmptyArray[A]]()
}
return option.Some(NonEmptyArray[A](as))
}


@@ -0,0 +1,370 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package nonempty
import (
"testing"
F "github.com/IBM/fp-go/v2/function"
O "github.com/IBM/fp-go/v2/option"
"github.com/stretchr/testify/assert"
)
// TestToNonEmptyArray tests the ToNonEmptyArray function
func TestToNonEmptyArray(t *testing.T) {
t.Run("Convert non-empty slice of integers", func(t *testing.T) {
input := []int{1, 2, 3}
result := ToNonEmptyArray(input)
assert.True(t, O.IsSome(result))
nea := O.GetOrElse(F.Constant(From(0)))(result)
assert.Equal(t, 3, Size(nea))
assert.Equal(t, 1, Head(nea))
assert.Equal(t, 3, Last(nea))
})
t.Run("Convert empty slice returns None", func(t *testing.T) {
input := []int{}
result := ToNonEmptyArray(input)
assert.True(t, O.IsNone(result))
})
t.Run("Convert single element slice", func(t *testing.T) {
input := []string{"hello"}
result := ToNonEmptyArray(input)
assert.True(t, O.IsSome(result))
nea := O.GetOrElse(F.Constant(From("")))(result)
assert.Equal(t, 1, Size(nea))
assert.Equal(t, "hello", Head(nea))
})
t.Run("Convert non-empty slice of strings", func(t *testing.T) {
input := []string{"a", "b", "c", "d"}
result := ToNonEmptyArray(input)
assert.True(t, O.IsSome(result))
nea := O.GetOrElse(F.Constant(From("")))(result)
assert.Equal(t, 4, Size(nea))
assert.Equal(t, "a", Head(nea))
assert.Equal(t, "d", Last(nea))
})
t.Run("Convert nil slice returns None", func(t *testing.T) {
var input []int
result := ToNonEmptyArray(input)
assert.True(t, O.IsNone(result))
})
t.Run("Convert slice with struct elements", func(t *testing.T) {
type Person struct {
Name string
Age int
}
input := []Person{
{Name: "Alice", Age: 30},
{Name: "Bob", Age: 25},
}
result := ToNonEmptyArray(input)
assert.True(t, O.IsSome(result))
nea := O.GetOrElse(F.Constant(From(Person{})))(result)
assert.Equal(t, 2, Size(nea))
assert.Equal(t, "Alice", Head(nea).Name)
})
t.Run("Convert slice with pointer elements", func(t *testing.T) {
val1, val2 := 10, 20
input := []*int{&val1, &val2}
result := ToNonEmptyArray(input)
assert.True(t, O.IsSome(result))
nea := O.GetOrElse(F.Constant(From[*int](nil)))(result)
assert.Equal(t, 2, Size(nea))
assert.Equal(t, 10, *Head(nea))
})
t.Run("Convert large slice", func(t *testing.T) {
input := make([]int, 1000)
for i := range input {
input[i] = i
}
result := ToNonEmptyArray(input)
assert.True(t, O.IsSome(result))
nea := O.GetOrElse(F.Constant(From(0)))(result)
assert.Equal(t, 1000, Size(nea))
assert.Equal(t, 0, Head(nea))
assert.Equal(t, 999, Last(nea))
})
t.Run("Convert slice with float64 elements", func(t *testing.T) {
input := []float64{1.5, 2.5, 3.5}
result := ToNonEmptyArray(input)
assert.True(t, O.IsSome(result))
nea := O.GetOrElse(F.Constant(From(0.0)))(result)
assert.Equal(t, 3, Size(nea))
assert.Equal(t, 1.5, Head(nea))
})
t.Run("Convert slice with boolean elements", func(t *testing.T) {
input := []bool{true, false, true}
result := ToNonEmptyArray(input)
assert.True(t, O.IsSome(result))
nea := O.GetOrElse(F.Constant(From(false)))(result)
assert.Equal(t, 3, Size(nea))
assert.True(t, Head(nea))
})
}
// TestToNonEmptyArrayWithOption tests ToNonEmptyArray with Option operations
func TestToNonEmptyArrayWithOption(t *testing.T) {
t.Run("Chain with Map to process elements", func(t *testing.T) {
input := []int{1, 2, 3}
result := F.Pipe2(
input,
ToNonEmptyArray[int],
O.Map(Map(func(x int) int { return x * 2 })),
)
assert.True(t, O.IsSome(result))
nea := O.GetOrElse(F.Constant(From(0)))(result)
assert.Equal(t, 2, Head(nea))
assert.Equal(t, 6, Last(nea))
})
t.Run("Chain with Map to get head", func(t *testing.T) {
input := []string{"first", "second", "third"}
result := F.Pipe2(
input,
ToNonEmptyArray[string],
O.Map(Head[string]),
)
assert.True(t, O.IsSome(result))
value := O.GetOrElse(F.Constant(""))(result)
assert.Equal(t, "first", value)
})
t.Run("GetOrElse with default value for empty slice", func(t *testing.T) {
input := []int{}
defaultValue := From(42)
result := F.Pipe2(
input,
ToNonEmptyArray[int],
O.GetOrElse(F.Constant(defaultValue)),
)
assert.Equal(t, 1, Size(result))
assert.Equal(t, 42, Head(result))
})
t.Run("GetOrElse with default value for non-empty slice", func(t *testing.T) {
input := []int{1, 2, 3}
defaultValue := From(42)
result := F.Pipe2(
input,
ToNonEmptyArray[int],
O.GetOrElse(F.Constant(defaultValue)),
)
assert.Equal(t, 3, Size(result))
assert.Equal(t, 1, Head(result))
})
t.Run("Fold with Some case", func(t *testing.T) {
input := []int{1, 2, 3}
result := F.Pipe2(
input,
ToNonEmptyArray[int],
O.Fold(
F.Constant(0),
func(nea NonEmptyArray[int]) int { return Head(nea) },
),
)
assert.Equal(t, 1, result)
})
t.Run("Fold with None case", func(t *testing.T) {
input := []int{}
result := F.Pipe2(
input,
ToNonEmptyArray[int],
O.Fold(
F.Constant(-1),
func(nea NonEmptyArray[int]) int { return Head(nea) },
),
)
assert.Equal(t, -1, result)
})
}
// TestToNonEmptyArrayComposition tests composing ToNonEmptyArray with other operations
func TestToNonEmptyArrayComposition(t *testing.T) {
t.Run("Compose with filter-like operation", func(t *testing.T) {
input := []int{1, 2, 3, 4, 5}
// Filter even numbers then convert
filtered := []int{}
for _, x := range input {
if x%2 == 0 {
filtered = append(filtered, x)
}
}
result := ToNonEmptyArray(filtered)
assert.True(t, O.IsSome(result))
nea := O.GetOrElse(F.Constant(From(0)))(result)
assert.Equal(t, 2, Size(nea))
assert.Equal(t, 2, Head(nea))
})
t.Run("Compose with map operation before conversion", func(t *testing.T) {
input := []int{1, 2, 3}
// Map then convert
mapped := make([]int, len(input))
for i, x := range input {
mapped[i] = x * 10
}
result := ToNonEmptyArray(mapped)
assert.True(t, O.IsSome(result))
nea := O.GetOrElse(F.Constant(From(0)))(result)
assert.Equal(t, 10, Head(nea))
assert.Equal(t, 30, Last(nea))
})
t.Run("Chain multiple Option operations", func(t *testing.T) {
input := []int{5, 10, 15}
result := F.Pipe3(
input,
ToNonEmptyArray[int],
O.Map(Map(func(x int) int { return x / 5 })),
O.Map(func(nea NonEmptyArray[int]) int {
return Head(nea) + Last(nea)
}),
)
assert.True(t, O.IsSome(result))
value := O.GetOrElse(F.Constant(0))(result)
assert.Equal(t, 4, value) // 1 + 3
})
}
// TestToNonEmptyArrayUseCases demonstrates practical use cases
func TestToNonEmptyArrayUseCases(t *testing.T) {
t.Run("Validate user input has at least one item", func(t *testing.T) {
// Simulate user input
userInput := []string{"item1", "item2"}
result := ToNonEmptyArray(userInput)
if O.IsSome(result) {
nea := O.GetOrElse(F.Constant(From("")))(result)
firstItem := Head(nea)
assert.Equal(t, "item1", firstItem)
} else {
t.Fatal("Expected Some but got None")
}
})
t.Run("Process only non-empty collections", func(t *testing.T) {
processItems := func(items []int) Option[int] {
return F.Pipe2(
items,
ToNonEmptyArray[int],
O.Map(func(nea NonEmptyArray[int]) int {
// Safe to use Head since we know it's non-empty
return Head(nea) * 2
}),
)
}
result1 := processItems([]int{5, 10, 15})
assert.True(t, O.IsSome(result1))
assert.Equal(t, 10, O.GetOrElse(F.Constant(0))(result1))
result2 := processItems([]int{})
assert.True(t, O.IsNone(result2))
})
t.Run("Convert API response to NonEmptyArray", func(t *testing.T) {
// Simulate API response
type APIResponse struct {
Items []string
}
response := APIResponse{Items: []string{"data1", "data2", "data3"}}
result := F.Pipe2(
response.Items,
ToNonEmptyArray[string],
O.Map(func(nea NonEmptyArray[string]) string {
return "First item: " + Head(nea)
}),
)
assert.True(t, O.IsSome(result))
message := O.GetOrElse(F.Constant("No items"))(result)
assert.Equal(t, "First item: data1", message)
})
t.Run("Ensure collection is non-empty before processing", func(t *testing.T) {
calculateAverage := func(numbers []float64) Option[float64] {
return F.Pipe2(
numbers,
ToNonEmptyArray[float64],
O.Map(func(nea NonEmptyArray[float64]) float64 {
sum := 0.0
for _, n := range nea {
sum += n
}
return sum / float64(Size(nea))
}),
)
}
result1 := calculateAverage([]float64{10.0, 20.0, 30.0})
assert.True(t, O.IsSome(result1))
assert.Equal(t, 20.0, O.GetOrElse(F.Constant(0.0))(result1))
result2 := calculateAverage([]float64{})
assert.True(t, O.IsNone(result2))
})
t.Run("Safe head extraction with type guarantee", func(t *testing.T) {
getFirstOrDefault := func(items []string, defaultValue string) string {
return F.Pipe2(
items,
ToNonEmptyArray[string],
O.Fold(
F.Constant(defaultValue),
Head[string],
),
)
}
result1 := getFirstOrDefault([]string{"a", "b", "c"}, "default")
assert.Equal(t, "a", result1)
result2 := getFirstOrDefault([]string{}, "default")
assert.Equal(t, "default", result2)
})
}


@@ -0,0 +1,15 @@
package nonempty
import "github.com/IBM/fp-go/v2/option"
type (
// NonEmptyArray represents an array with at least one element
NonEmptyArray[A any] []A
Kleisli[A, B any] = func(A) NonEmptyArray[B]
Operator[A, B any] = Kleisli[NonEmptyArray[A], B]
Option[A any] = option.Option[A]
)
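// A minimal usage sketch for these aliases (illustrative only; it assumes the
// From, Map, Head and Size helpers defined elsewhere in this package):
//
//	nea := From(1)                                  // NonEmptyArray[int] holding a single element
//	double := Map(func(x int) int { return x * 2 }) // an Operator[int, int], i.e. a Kleisli on NonEmptyArray[int]
//	_ = Head(double(nea))                           // 2; Head is total because the array can never be empty
//	_ = Size(double(nea))                           // 1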

710
v2/assert/assert.go Normal file

@@ -0,0 +1,710 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package assert provides functional assertion helpers for testing.
//
// This package wraps testify/assert functions in a Reader monad pattern,
// allowing for composable and functional test assertions. Each assertion
// returns a Reader that takes a *testing.T and performs the assertion.
//
// # Data Last Principle
//
// This package follows the "data last" functional programming principle, where
// the data being operated on comes as the last parameter in a chain of function
// applications. This design enables several powerful functional programming patterns:
//
// 1. **Partial Application**: You can create reusable assertion functions by providing
// configuration parameters first, leaving the data and testing context for later.
//
// 2. **Function Composition**: Assertions can be composed and combined before being
// applied to actual data.
//
// 3. **Point-Free Style**: You can pass assertion functions around without immediately
// providing the data they operate on.
//
// The general pattern is:
//
// assert.Function(config)(data)(testingContext)
// ↑ ↑ ↑
// expected actual *testing.T (always last)
//
// For single-parameter assertions:
//
// assert.Function(data)(testingContext)
// ↑ ↑
// actual *testing.T (always last)
//
// Examples of "data last" in action:
//
// // Multi-parameter: expected value → actual value → testing context
// assert.Equal(42)(result)(t)
// assert.ArrayContains(3)(numbers)(t)
//
// // Single-parameter: data → testing context
// assert.NoError(err)(t)
// assert.ArrayNotEmpty(arr)(t)
//
// // Partial application - create reusable assertions
// isPositive := assert.That(func(n int) bool { return n > 0 })
// // Later, apply to different values:
// isPositive(42)(t) // Passes
// isPositive(-5)(t) // Fails
//
// // Composition - combine assertions before applying data
// validateUser := func(u User) assert.Reader {
// return assert.AllOf([]assert.Reader{
// assert.Equal("Alice")(u.Name),
// assert.That(func(age int) bool { return age >= 18 })(u.Age),
// })
// }
// validateUser(user)(t)
//
// The package supports:
// - Equality and inequality assertions
// - Collection assertions (arrays, maps, strings)
// - Error handling assertions
// - Result type assertions
// - Custom predicate assertions
// - Composable test suites
//
// Example:
//
// func TestExample(t *testing.T) {
// value := 42
// assert.Equal(42)(value)(t) // Curried style
//
// // Composing multiple assertions
// arr := []int{1, 2, 3}
// assertions := assert.AllOf([]assert.Reader{
// assert.ArrayNotEmpty(arr),
// assert.ArrayLength[int](3)(arr),
// assert.ArrayContains(2)(arr),
// })
// assertions(t)
// }
package assert
import (
"fmt"
"testing"
"github.com/IBM/fp-go/v2/boolean"
"github.com/IBM/fp-go/v2/eq"
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/reader"
"github.com/IBM/fp-go/v2/result"
"github.com/stretchr/testify/assert"
)
var (
// Eq is the equal predicate checking if objects are equal
Eq = eq.FromEquals(assert.ObjectsAreEqual)
)
// wrap1 is an internal helper function that wraps testify assertion functions
// into the Reader monad pattern with curried parameters.
//
// It takes a testify assertion function and converts it into a curried function
// that first takes an expected value, then an actual value, and finally returns
// a Reader that performs the assertion when given a *testing.T.
//
// Parameters:
// - wrapped: The testify assertion function to wrap
// - expected: The expected value for comparison
// - msgAndArgs: Optional message and arguments for assertion failure
//
// Returns:
// - A Kleisli function that takes the actual value and returns a Reader
func wrap1[T any](wrapped func(t assert.TestingT, expected, actual any, msgAndArgs ...any) bool, expected T, msgAndArgs ...any) Kleisli[T] {
return func(actual T) Reader {
return func(t *testing.T) bool {
return wrapped(t, expected, actual, msgAndArgs...)
}
}
}
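// To make the currying produced by wrap1 concrete, here is a small sketch of how
// the Equal assertion defined below unfolds (illustrative only):
//
//	step1 := Equal(42) // Kleisli[int]: the expected value is fixed, the actual value is still missing
//	step2 := step1(41) // Reader: both values are fixed, the *testing.T is still missing
//	_ = step2          // step2(t) finally delegates to testify's assert.Equal(t, 42, 41)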
// NotEqual tests if the expected and the actual values are not equal.
//
// This function follows the "data last" principle - you provide the expected value first,
// then the actual value, and finally the testing.T context.
//
// Example:
//
// func TestNotEqual(t *testing.T) {
// value := 42
// assert.NotEqual(10)(value)(t) // Passes: 42 != 10
// assert.NotEqual(42)(value)(t) // Fails: 42 == 42
// }
func NotEqual[T any](expected T) Kleisli[T] {
return wrap1(assert.NotEqual, expected)
}
// Equal tests if the expected and the actual values are equal.
//
// This is one of the most commonly used assertions. It follows the "data last" principle -
// you provide the expected value first, then the actual value, and finally the testing.T context.
//
// Example:
//
// func TestEqual(t *testing.T) {
// result := 2 + 2
// assert.Equal(4)(result)(t) // Passes
//
// name := "Alice"
// assert.Equal("Alice")(name)(t) // Passes
//
// // Can be composed with other assertions
// user := User{Name: "Bob", Age: 30}
// assertions := assert.AllOf([]assert.Reader{
// assert.Equal("Bob")(user.Name),
// assert.Equal(30)(user.Age),
// })
// assertions(t)
// }
func Equal[T any](expected T) Kleisli[T] {
return wrap1(assert.Equal, expected)
}
// ArrayNotEmpty checks if an array is not empty.
//
// Example:
//
// func TestArrayNotEmpty(t *testing.T) {
// numbers := []int{1, 2, 3}
// assert.ArrayNotEmpty(numbers)(t) // Passes
//
// empty := []int{}
// assert.ArrayNotEmpty(empty)(t) // Fails
// }
func ArrayNotEmpty[T any](arr []T) Reader {
return func(t *testing.T) bool {
return assert.NotEmpty(t, arr)
}
}
// RecordNotEmpty checks if a map is not empty.
//
// Example:
//
// func TestRecordNotEmpty(t *testing.T) {
// config := map[string]int{"timeout": 30, "retries": 3}
// assert.RecordNotEmpty(config)(t) // Passes
//
// empty := map[string]int{}
// assert.RecordNotEmpty(empty)(t) // Fails
// }
func RecordNotEmpty[K comparable, T any](mp map[K]T) Reader {
return func(t *testing.T) bool {
return assert.NotEmpty(t, mp)
}
}
// StringNotEmpty checks if a string is not empty.
//
// Example:
//
// func TestStringNotEmpty(t *testing.T) {
// message := "Hello, World!"
// assert.StringNotEmpty(message)(t) // Passes
//
// empty := ""
// assert.StringNotEmpty(empty)(t) // Fails
// }
func StringNotEmpty(s string) Reader {
return func(t *testing.T) bool {
return assert.NotEmpty(t, s)
}
}
// ArrayLength tests if an array has the expected length.
//
// Example:
//
// func TestArrayLength(t *testing.T) {
// numbers := []int{1, 2, 3, 4, 5}
// assert.ArrayLength[int](5)(numbers)(t) // Passes
// assert.ArrayLength[int](3)(numbers)(t) // Fails
// }
func ArrayLength[T any](expected int) Kleisli[[]T] {
return func(actual []T) Reader {
return func(t *testing.T) bool {
return assert.Len(t, actual, expected)
}
}
}
// RecordLength tests if a map has the expected length.
//
// Example:
//
// func TestRecordLength(t *testing.T) {
// config := map[string]string{"host": "localhost", "port": "8080"}
// assert.RecordLength[string, string](2)(config)(t) // Passes
// assert.RecordLength[string, string](3)(config)(t) // Fails
// }
func RecordLength[K comparable, T any](expected int) Kleisli[map[K]T] {
return func(actual map[K]T) Reader {
return func(t *testing.T) bool {
return assert.Len(t, actual, expected)
}
}
}
// StringLength tests if a string has the expected length.
//
// Example:
//
// func TestStringLength(t *testing.T) {
// message := "Hello"
// assert.StringLength[any, any](5)(message)(t) // Passes
// assert.StringLength[any, any](10)(message)(t) // Fails
// }
func StringLength[K comparable, T any](expected int) Kleisli[string] {
return func(actual string) Reader {
return func(t *testing.T) bool {
return assert.Len(t, actual, expected)
}
}
}
// NoError validates that there is no error.
//
// This is commonly used to assert that operations complete successfully.
//
// Example:
//
// func TestNoError(t *testing.T) {
// err := doSomething()
// assert.NoError(err)(t) // Passes if err is nil
//
// // Can be used with result types
// result := result.TryCatch(func() (int, error) {
// return 42, nil
// })
// assert.Success(result)(t) // Uses NoError internally
// }
func NoError(err error) Reader {
return func(t *testing.T) bool {
return assert.NoError(t, err)
}
}
// Error validates that there is an error.
//
// This is used to assert that operations fail as expected.
//
// Example:
//
// func TestError(t *testing.T) {
// err := validateInput("")
// assert.Error(err)(t) // Passes if err is not nil
//
// err2 := validateInput("valid")
// assert.Error(err2)(t) // Fails if err2 is nil
// }
func Error(err error) Reader {
return func(t *testing.T) bool {
return assert.Error(t, err)
}
}
// Success checks if a [Result] represents success.
//
// This is a convenience function for testing Result types from the fp-go library.
//
// Example:
//
// func TestSuccess(t *testing.T) {
// res := result.Of[int](42)
// assert.Success(res)(t) // Passes
//
// failedRes := result.Error[int](errors.New("failed"))
// assert.Success(failedRes)(t) // Fails
// }
func Success[T any](res Result[T]) Reader {
return NoError(result.ToError(res))
}
// Failure checks if a [Result] represents failure.
//
// This is a convenience function for testing Result types from the fp-go library.
//
// Example:
//
// func TestFailure(t *testing.T) {
// res := result.Error[int](errors.New("something went wrong"))
// assert.Failure(res)(t) // Passes
//
// successRes := result.Of[int](42)
// assert.Failure(successRes)(t) // Fails
// }
func Failure[T any](res Result[T]) Reader {
return Error(result.ToError(res))
}
// ArrayContains tests if a value is contained in an array.
//
// Example:
//
// func TestArrayContains(t *testing.T) {
// numbers := []int{1, 2, 3, 4, 5}
// assert.ArrayContains(3)(numbers)(t) // Passes
// assert.ArrayContains(10)(numbers)(t) // Fails
//
// names := []string{"Alice", "Bob", "Charlie"}
// assert.ArrayContains("Bob")(names)(t) // Passes
// }
func ArrayContains[T any](expected T) Kleisli[[]T] {
return func(actual []T) Reader {
return func(t *testing.T) bool {
return assert.Contains(t, actual, expected)
}
}
}
// ContainsKey tests if a key is contained in a map.
//
// Example:
//
// func TestContainsKey(t *testing.T) {
// config := map[string]int{"timeout": 30, "retries": 3}
// assert.ContainsKey[int]("timeout")(config)(t) // Passes
// assert.ContainsKey[int]("maxSize")(config)(t) // Fails
// }
func ContainsKey[T any, K comparable](expected K) Kleisli[map[K]T] {
return func(actual map[K]T) Reader {
return func(t *testing.T) bool {
return assert.Contains(t, actual, expected)
}
}
}
// NotContainsKey tests if a key is not contained in a map.
//
// Example:
//
// func TestNotContainsKey(t *testing.T) {
// config := map[string]int{"timeout": 30, "retries": 3}
// assert.NotContainsKey[int]("maxSize")(config)(t) // Passes
// assert.NotContainsKey[int]("timeout")(config)(t) // Fails
// }
func NotContainsKey[T any, K comparable](expected K) Kleisli[map[K]T] {
return func(actual map[K]T) Reader {
return func(t *testing.T) bool {
return assert.NotContains(t, actual, expected)
}
}
}
// That asserts that a particular predicate matches.
//
// This is a powerful function that allows you to create custom assertions using predicates.
//
// Example:
//
// func TestThat(t *testing.T) {
// // Test if a number is positive
// isPositive := func(n int) bool { return n > 0 }
// assert.That(isPositive)(42)(t) // Passes
// assert.That(isPositive)(-5)(t) // Fails
//
// // Test if a string is uppercase
// isUppercase := func(s string) bool { return s == strings.ToUpper(s) }
// assert.That(isUppercase)("HELLO")(t) // Passes
// assert.That(isUppercase)("Hello")(t) // Fails
//
// // Can be combined with Local for property testing
// type User struct { Age int }
// ageIsAdult := assert.Local(func(u User) int { return u.Age })(
// assert.That(func(age int) bool { return age >= 18 }),
// )
// user := User{Age: 25}
// ageIsAdult(user)(t) // Passes
// }
func That[T any](pred Predicate[T]) Kleisli[T] {
return func(a T) Reader {
return func(t *testing.T) bool {
if pred(a) {
return true
}
return assert.Fail(t, fmt.Sprintf("Predicate %v does not match value %v", pred, a))
}
}
}
// AllOf combines multiple assertion Readers into a single Reader that passes
// only if all assertions pass.
//
// This function uses boolean AND logic (MonoidAll) to combine the results of
// all assertions. If any assertion fails, the combined assertion fails.
//
// This is useful for grouping related assertions together and ensuring all
// conditions are met.
//
// Parameters:
// - readers: Array of assertion Readers to combine
//
// Returns:
// - A single Reader that performs all assertions and returns true only if all pass
//
// Example:
//
// func TestUser(t *testing.T) {
// user := User{Name: "Alice", Age: 30, Active: true}
// assertions := assert.AllOf([]assert.Reader{
// assert.Equal("Alice")(user.Name),
// assert.Equal(30)(user.Age),
// assert.Equal(true)(user.Active),
// })
// assertions(t)
// }
//
//go:inline
func AllOf(readers []Reader) Reader {
return reader.MonadReduceArrayM(readers, boolean.MonoidAll)
}
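// A minimal sketch of the resulting semantics (the fold itself is delegated to
// reader.MonadReduceArrayM with boolean.MonoidAll):
//
//	combined := AllOf([]Reader{Equal(1)(1), ArrayNotEmpty([]int{1, 2})})
//	_ = combined // combined(t) runs both assertions against t and ANDs the results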
// RunAll executes a map of named test cases, running each as a subtest.
//
// This function creates a Reader that runs multiple named test cases using
// Go's t.Run for proper test isolation and reporting. Each test case is
// executed as a separate subtest with its own name.
//
// The function returns true only if all subtests pass. This allows for
// better test organization and clearer test output.
//
// Parameters:
// - testcases: Map of test names to assertion Readers
//
// Returns:
// - A Reader that executes all named test cases and returns true if all pass
//
// Example:
//
// func TestMathOperations(t *testing.T) {
// testcases := map[string]assert.Reader{
// "addition": assert.Equal(4)(2 + 2),
// "multiplication": assert.Equal(6)(2 * 3),
// "subtraction": assert.Equal(1)(3 - 2),
// }
// assert.RunAll(testcases)(t)
// }
//
//go:inline
func RunAll(testcases map[string]Reader) Reader {
return func(t *testing.T) bool {
current := true
for k, r := range testcases {
// run the subtest unconditionally so that one failing case does not short-circuit the remaining ones
ok := t.Run(k, func(t1 *testing.T) {
r(t1)
})
current = current && ok
}
return current
}
}
// Local transforms a Reader that works on type R1 into a Reader that works on type R2,
// by providing a function that converts R2 to R1. This allows you to focus a test on a
// specific property or subset of a larger data structure.
//
// This is particularly useful when you have an assertion that operates on a specific field
// or property, and you want to apply it to a complete object. Instead of extracting the
// property and then asserting on it, you can transform the assertion to work directly
// on the whole object.
//
// Parameters:
// - f: A function that extracts or transforms R2 into R1
//
// Returns:
// - A function that transforms a Reader[R1, Reader] into a Reader[R2, Reader]
//
// Example:
//
// type User struct {
// Name string
// Age int
// }
//
// // Create an assertion that checks if age is positive
// ageIsPositive := assert.That(func(age int) bool { return age > 0 })
//
// // Focus this assertion on the Age field of User
// userAgeIsPositive := assert.Local(func(u User) int { return u.Age })(ageIsPositive)
//
// // Now we can test the whole User object
// user := User{Name: "Alice", Age: 30}
// userAgeIsPositive(user)(t)
//
//go:inline
func Local[R1, R2 any](f func(R2) R1) func(Kleisli[R1]) Kleisli[R2] {
return reader.Local[Reader](f)
}
// LocalL is similar to Local but uses a Lens to focus on a specific property.
// A Lens is a functional programming construct that provides a composable way to
// focus on a part of a data structure.
//
// This function is particularly useful when you want to focus a test on a specific
// field of a struct using a lens, making the code more declarative and composable.
// Lenses are often code-generated or predefined for common data structures.
//
// Parameters:
// - l: A Lens that focuses from type S to type T
//
// Returns:
// - A function that transforms a Reader[T, Reader] into a Reader[S, Reader]
//
// Example:
//
// type Person struct {
// Name string
// Email string
// }
//
// // Assume we have a lens that focuses on the Email field
// var emailLens = lens.Prop[Person, string]("Email")
//
// // Create an assertion for email format
// validEmail := assert.That(func(email string) bool {
// return strings.Contains(email, "@")
// })
//
// // Focus this assertion on the Email property using a lens
// validPersonEmail := assert.LocalL(emailLens)(validEmail)
//
// // Test a Person object
// person := Person{Name: "Bob", Email: "bob@example.com"}
// validPersonEmail(person)(t)
//
//go:inline
func LocalL[S, T any](l Lens[S, T]) func(Kleisli[T]) Kleisli[S] {
return reader.Local[Reader](l.Get)
}
// fromOptionalGetter is an internal helper that creates an assertion Reader from
// an optional getter function. It asserts that the optional value is present (Some).
func fromOptionalGetter[S, T any](getter func(S) option.Option[T], msgAndArgs ...any) Kleisli[S] {
return func(s S) Reader {
return func(t *testing.T) bool {
return assert.True(t, option.IsSome(getter(s)), msgAndArgs...)
}
}
}
// FromOptional creates an assertion that checks if an Optional can successfully extract a value.
// An Optional is an optic that represents an optional reference to a subpart of a data structure.
//
// This function is useful when you have an Optional optic and want to assert that the optional
// value is present (Some) rather than absent (None). The assertion passes if the Optional's
// GetOption returns Some, and fails if it returns None.
//
// This enables property-focused testing where you verify that a particular optional field or
// sub-structure exists and is accessible.
//
// Parameters:
// - opt: An Optional optic that focuses from type S to type T
//
// Returns:
// - A Reader that asserts the optional value is present when applied to a value of type S
//
// Example:
//
// type Config struct {
// Database *DatabaseConfig // Optional field
// }
//
// type DatabaseConfig struct {
// Host string
// Port int
// }
//
// // Create an Optional that focuses on the Database field
// dbOptional := optional.MakeOptional(
// func(c Config) option.Option[*DatabaseConfig] {
// if c.Database != nil {
// return option.Some(c.Database)
// }
// return option.None[*DatabaseConfig]()
// },
// func(c Config, db *DatabaseConfig) Config {
// c.Database = db
// return c
// },
// )
//
// // Assert that the database config is present
// hasDatabaseConfig := assert.FromOptional(dbOptional)
//
// config := Config{Database: &DatabaseConfig{Host: "localhost", Port: 5432}}
// hasDatabaseConfig(config)(t) // Passes
//
// emptyConfig := Config{Database: nil}
// hasDatabaseConfig(emptyConfig)(t) // Fails
//
//go:inline
func FromOptional[S, T any](opt Optional[S, T]) reader.Reader[S, Reader] {
return fromOptionalGetter(opt.GetOption, "Optional: %s", opt)
}
// FromPrism creates an assertion that checks if a Prism can successfully extract a value.
// A Prism is an optic used to select part of a sum type (tagged union or variant).
//
// This function is useful when you have a Prism optic and want to assert that a value
// matches a specific variant of a sum type. The assertion passes if the Prism's GetOption
// returns Some (meaning the value is of the expected variant), and fails if it returns None
// (meaning the value is a different variant).
//
// This enables variant-focused testing where you verify that a value is of a particular
// type or matches a specific condition within a sum type.
//
// Parameters:
// - p: A Prism optic that focuses from type S to type T
//
// Returns:
// - A Reader that asserts the prism successfully extracts when applied to a value of type S
//
// Example:
//
// type Result interface{ isResult() }
// type Success struct{ Value int }
// type Failure struct{ Error string }
//
// func (Success) isResult() {}
// func (Failure) isResult() {}
//
// // Create a Prism that focuses on Success variant
// successPrism := prism.MakePrism(
// func(r Result) option.Option[int] {
// if s, ok := r.(Success); ok {
// return option.Some(s.Value)
// }
// return option.None[int]()
// },
// func(v int) Result { return Success{Value: v} },
// )
//
// // Assert that the result is a Success
// isSuccess := assert.FromPrism(successPrism)
//
// result1 := Success{Value: 42}
// isSuccess(result1)(t) // Passes
//
// result2 := Failure{Error: "something went wrong"}
// isSuccess(result2)(t) // Fails
//
//go:inline
func FromPrism[S, T any](p Prism[S, T]) reader.Reader[S, Reader] {
return fromOptionalGetter(p.GetOption, "Prism: %s", p)
}


@@ -16,94 +16,677 @@
package assert
import (
"fmt"
"errors"
"testing"
"github.com/IBM/fp-go/v2/eq"
"github.com/IBM/fp-go/v2/optics/prism"
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/result"
"github.com/stretchr/testify/assert"
S "github.com/IBM/fp-go/v2/string"
)
var (
errTest = fmt.Errorf("test failure")
// Eq is the equal predicate checking if objects are equal
Eq = eq.FromEquals(assert.ObjectsAreEqual)
)
func wrap1[T any](wrapped func(t assert.TestingT, expected, actual any, msgAndArgs ...any) bool, t *testing.T, expected T) result.Kleisli[T, T] {
return func(actual T) Result[T] {
ok := wrapped(t, expected, actual)
if ok {
return result.Of(actual)
func TestEqual(t *testing.T) {
t.Run("should pass when values are equal", func(t *testing.T) {
result := Equal(42)(42)(t)
if !result {
t.Error("Expected Equal to pass for equal values")
}
return result.Left[T](errTest)
}
}
// NotEqual tests if the expected and the actual values are not equal
func NotEqual[T any](t *testing.T, expected T) result.Kleisli[T, T] {
return wrap1(assert.NotEqual, t, expected)
}
// Equal tests if the expected and the actual values are equal
func Equal[T any](t *testing.T, expected T) result.Kleisli[T, T] {
return wrap1(assert.Equal, t, expected)
}
// Length tests if an array has the expected length
func Length[T any](t *testing.T, expected int) result.Kleisli[[]T, []T] {
return func(actual []T) Result[[]T] {
ok := assert.Len(t, actual, expected)
if ok {
return result.Of(actual)
}
return result.Left[[]T](errTest)
}
}
// NoError validates that there is no error
func NoError[T any](t *testing.T) result.Operator[T, T] {
return func(actual Result[T]) Result[T] {
return result.MonadFold(actual, func(e error) Result[T] {
assert.NoError(t, e)
return result.Left[T](e)
}, func(value T) Result[T] {
assert.NoError(t, nil)
return result.Of(value)
})
t.Run("should fail when values are not equal", func(t *testing.T) {
mockT := &testing.T{}
result := Equal(42)(43)(mockT)
if result {
t.Error("Expected Equal to fail for different values")
}
})
t.Run("should work with strings", func(t *testing.T) {
result := Equal("hello")("hello")(t)
if !result {
t.Error("Expected Equal to pass for equal strings")
}
})
}
// ArrayContains tests if a value is contained in an array
func ArrayContains[T any](t *testing.T, expected T) result.Kleisli[[]T, []T] {
return func(actual []T) Result[[]T] {
ok := assert.Contains(t, actual, expected)
if ok {
return result.Of(actual)
func TestNotEqual(t *testing.T) {
t.Run("should pass when values are not equal", func(t *testing.T) {
result := NotEqual(42)(43)(t)
if !result {
t.Error("Expected NotEqual to pass for different values")
}
return result.Left[[]T](errTest)
})
t.Run("should fail when values are equal", func(t *testing.T) {
mockT := &testing.T{}
result := NotEqual(42)(42)(mockT)
if result {
t.Error("Expected NotEqual to fail for equal values")
}
})
}
// ContainsKey tests if a key is contained in a map
func ContainsKey[T any, K comparable](t *testing.T, expected K) result.Kleisli[map[K]T, map[K]T] {
return func(actual map[K]T) Result[map[K]T] {
ok := assert.Contains(t, actual, expected)
if ok {
return result.Of(actual)
func TestArrayNotEmpty(t *testing.T) {
t.Run("should pass for non-empty array", func(t *testing.T) {
arr := []int{1, 2, 3}
result := ArrayNotEmpty(arr)(t)
if !result {
t.Error("Expected ArrayNotEmpty to pass for non-empty array")
}
return result.Left[map[K]T](errTest)
})
t.Run("should fail for empty array", func(t *testing.T) {
mockT := &testing.T{}
arr := []int{}
result := ArrayNotEmpty(arr)(mockT)
if result {
t.Error("Expected ArrayNotEmpty to fail for empty array")
}
})
}
// NotContainsKey tests if a key is not contained in a map
func NotContainsKey[T any, K comparable](t *testing.T, expected K) result.Kleisli[map[K]T, map[K]T] {
return func(actual map[K]T) Result[map[K]T] {
ok := assert.NotContains(t, actual, expected)
if ok {
return result.Of(actual)
func TestRecordNotEmpty(t *testing.T) {
t.Run("should pass for non-empty map", func(t *testing.T) {
mp := map[string]int{"a": 1, "b": 2}
result := RecordNotEmpty(mp)(t)
if !result {
t.Error("Expected RecordNotEmpty to pass for non-empty map")
}
return result.Left[map[K]T](errTest)
})
t.Run("should fail for empty map", func(t *testing.T) {
mockT := &testing.T{}
mp := map[string]int{}
result := RecordNotEmpty(mp)(mockT)
if result {
t.Error("Expected RecordNotEmpty to fail for empty map")
}
})
}
func TestArrayLength(t *testing.T) {
t.Run("should pass when length matches", func(t *testing.T) {
arr := []int{1, 2, 3}
result := ArrayLength[int](3)(arr)(t)
if !result {
t.Error("Expected ArrayLength to pass when length matches")
}
})
t.Run("should fail when length doesn't match", func(t *testing.T) {
mockT := &testing.T{}
arr := []int{1, 2, 3}
result := ArrayLength[int](5)(arr)(mockT)
if result {
t.Error("Expected ArrayLength to fail when length doesn't match")
}
})
t.Run("should work with empty array", func(t *testing.T) {
arr := []string{}
result := ArrayLength[string](0)(arr)(t)
if !result {
t.Error("Expected ArrayLength to pass for empty array with expected length 0")
}
})
}
func TestRecordLength(t *testing.T) {
t.Run("should pass when map length matches", func(t *testing.T) {
mp := map[string]int{"a": 1, "b": 2}
result := RecordLength[string, int](2)(mp)(t)
if !result {
t.Error("Expected RecordLength to pass when length matches")
}
})
t.Run("should fail when map length doesn't match", func(t *testing.T) {
mockT := &testing.T{}
mp := map[string]int{"a": 1}
result := RecordLength[string, int](3)(mp)(mockT)
if result {
t.Error("Expected RecordLength to fail when length doesn't match")
}
})
}
func TestStringLength(t *testing.T) {
t.Run("should pass when string length matches", func(t *testing.T) {
str := "hello"
result := StringLength[string, int](5)(str)(t)
if !result {
t.Error("Expected StringLength to pass when length matches")
}
})
t.Run("should fail when string length doesn't match", func(t *testing.T) {
mockT := &testing.T{}
str := "hello"
result := StringLength[string, int](10)(str)(mockT)
if result {
t.Error("Expected StringLength to fail when length doesn't match")
}
})
t.Run("should work with empty string", func(t *testing.T) {
str := ""
result := StringLength[string, int](0)(str)(t)
if !result {
t.Error("Expected StringLength to pass for empty string with expected length 0")
}
})
}
func TestNoError(t *testing.T) {
t.Run("should pass when error is nil", func(t *testing.T) {
result := NoError(nil)(t)
if !result {
t.Error("Expected NoError to pass when error is nil")
}
})
t.Run("should fail when error is not nil", func(t *testing.T) {
mockT := &testing.T{}
err := errors.New("test error")
result := NoError(err)(mockT)
if result {
t.Error("Expected NoError to fail when error is not nil")
}
})
}
func TestError(t *testing.T) {
t.Run("should pass when error is not nil", func(t *testing.T) {
err := errors.New("test error")
result := Error(err)(t)
if !result {
t.Error("Expected Error to pass when error is not nil")
}
})
t.Run("should fail when error is nil", func(t *testing.T) {
mockT := &testing.T{}
result := Error(nil)(mockT)
if result {
t.Error("Expected Error to fail when error is nil")
}
})
}
func TestSuccess(t *testing.T) {
t.Run("should pass for successful result", func(t *testing.T) {
res := result.Of(42)
result := Success(res)(t)
if !result {
t.Error("Expected Success to pass for successful result")
}
})
t.Run("should fail for error result", func(t *testing.T) {
mockT := &testing.T{}
res := result.Left[int](errors.New("test error"))
result := Success(res)(mockT)
if result {
t.Error("Expected Success to fail for error result")
}
})
}
func TestFailure(t *testing.T) {
t.Run("should pass for error result", func(t *testing.T) {
res := result.Left[int](errors.New("test error"))
result := Failure(res)(t)
if !result {
t.Error("Expected Failure to pass for error result")
}
})
t.Run("should fail for successful result", func(t *testing.T) {
mockT := &testing.T{}
res := result.Of(42)
result := Failure(res)(mockT)
if result {
t.Error("Expected Failure to fail for successful result")
}
})
}
func TestArrayContains(t *testing.T) {
t.Run("should pass when element is in array", func(t *testing.T) {
arr := []int{1, 2, 3, 4, 5}
result := ArrayContains(3)(arr)(t)
if !result {
t.Error("Expected ArrayContains to pass when element is in array")
}
})
t.Run("should fail when element is not in array", func(t *testing.T) {
mockT := &testing.T{}
arr := []int{1, 2, 3}
result := ArrayContains(10)(arr)(mockT)
if result {
t.Error("Expected ArrayContains to fail when element is not in array")
}
})
t.Run("should work with strings", func(t *testing.T) {
arr := []string{"apple", "banana", "cherry"}
result := ArrayContains("banana")(arr)(t)
if !result {
t.Error("Expected ArrayContains to pass for string element")
}
})
}
func TestContainsKey(t *testing.T) {
t.Run("should pass when key exists in map", func(t *testing.T) {
mp := map[string]int{"a": 1, "b": 2, "c": 3}
result := ContainsKey[int]("b")(mp)(t)
if !result {
t.Error("Expected ContainsKey to pass when key exists")
}
})
t.Run("should fail when key doesn't exist in map", func(t *testing.T) {
mockT := &testing.T{}
mp := map[string]int{"a": 1, "b": 2}
result := ContainsKey[int]("z")(mp)(mockT)
if result {
t.Error("Expected ContainsKey to fail when key doesn't exist")
}
})
}
func TestNotContainsKey(t *testing.T) {
t.Run("should pass when key doesn't exist in map", func(t *testing.T) {
mp := map[string]int{"a": 1, "b": 2}
result := NotContainsKey[int]("z")(mp)(t)
if !result {
t.Error("Expected NotContainsKey to pass when key doesn't exist")
}
})
t.Run("should fail when key exists in map", func(t *testing.T) {
mockT := &testing.T{}
mp := map[string]int{"a": 1, "b": 2}
result := NotContainsKey[int]("a")(mp)(mockT)
if result {
t.Error("Expected NotContainsKey to fail when key exists")
}
})
}
func TestThat(t *testing.T) {
t.Run("should pass when predicate is true", func(t *testing.T) {
isEven := func(n int) bool { return n%2 == 0 }
result := That(isEven)(42)(t)
if !result {
t.Error("Expected That to pass when predicate is true")
}
})
t.Run("should fail when predicate is false", func(t *testing.T) {
mockT := &testing.T{}
isEven := func(n int) bool { return n%2 == 0 }
result := That(isEven)(43)(mockT)
if result {
t.Error("Expected That to fail when predicate is false")
}
})
t.Run("should work with string predicates", func(t *testing.T) {
startsWithH := func(s string) bool { return S.IsNonEmpty(s) && s[0] == 'h' }
result := That(startsWithH)("hello")(t)
if !result {
t.Error("Expected That to pass for string predicate")
}
})
}
func TestAllOf(t *testing.T) {
t.Run("should pass when all assertions pass", func(t *testing.T) {
assertions := AllOf([]Reader{
Equal(42)(42),
Equal("hello")("hello"),
ArrayNotEmpty([]int{1, 2, 3}),
})
result := assertions(t)
if !result {
t.Error("Expected AllOf to pass when all assertions pass")
}
})
t.Run("should fail when any assertion fails", func(t *testing.T) {
mockT := &testing.T{}
assertions := AllOf([]Reader{
Equal(42)(42),
Equal("hello")("goodbye"),
ArrayNotEmpty([]int{1, 2, 3}),
})
result := assertions(mockT)
if result {
t.Error("Expected AllOf to fail when any assertion fails")
}
})
t.Run("should work with empty array", func(t *testing.T) {
assertions := AllOf([]Reader{})
result := assertions(t)
if !result {
t.Error("Expected AllOf to pass for empty array")
}
})
t.Run("should combine multiple array assertions", func(t *testing.T) {
arr := []int{1, 2, 3, 4, 5}
assertions := AllOf([]Reader{
ArrayNotEmpty(arr),
ArrayLength[int](5)(arr),
ArrayContains(3)(arr),
})
result := assertions(t)
if !result {
t.Error("Expected AllOf to pass for multiple array assertions")
}
})
}
func TestRunAll(t *testing.T) {
t.Run("should run all named test cases", func(t *testing.T) {
testcases := map[string]Reader{
"equality": Equal(42)(42),
"string_check": Equal("test")("test"),
"array_check": ArrayNotEmpty([]int{1, 2, 3}),
}
result := RunAll(testcases)(t)
if !result {
t.Error("Expected RunAll to pass when all test cases pass")
}
})
// Note: Testing failure behavior of RunAll is tricky because subtests
// will actually fail in the test framework. The function works correctly
// as demonstrated by the passing test above.
t.Run("should work with empty test cases", func(t *testing.T) {
testcases := map[string]Reader{}
result := RunAll(testcases)(t)
if !result {
t.Error("Expected RunAll to pass for empty test cases")
}
})
}
func TestEq(t *testing.T) {
t.Run("should return true for equal values", func(t *testing.T) {
if !Eq.Equals(42, 42) {
t.Error("Expected Eq to return true for equal integers")
}
})
t.Run("should return false for different values", func(t *testing.T) {
if Eq.Equals(42, 43) {
t.Error("Expected Eq to return false for different integers")
}
})
t.Run("should work with strings", func(t *testing.T) {
if !Eq.Equals("hello", "hello") {
t.Error("Expected Eq to return true for equal strings")
}
if Eq.Equals("hello", "world") {
t.Error("Expected Eq to return false for different strings")
}
})
t.Run("should work with slices", func(t *testing.T) {
arr1 := []int{1, 2, 3}
arr2 := []int{1, 2, 3}
if !Eq.Equals(arr1, arr2) {
t.Error("Expected Eq to return true for equal slices")
}
})
}
func TestLocal(t *testing.T) {
type User struct {
Name string
Age int
}
t.Run("should focus assertion on a property", func(t *testing.T) {
// Create an assertion that checks if age is positive
ageIsPositive := That(func(age int) bool { return age > 0 })
// Focus this assertion on the Age field of User
userAgeIsPositive := Local(func(u User) int { return u.Age })(ageIsPositive)
// Test with a user who has a positive age
user := User{Name: "Alice", Age: 30}
result := userAgeIsPositive(user)(t)
if !result {
t.Error("Expected focused assertion to pass for positive age")
}
})
t.Run("should fail when focused property doesn't match", func(t *testing.T) {
mockT := &testing.T{}
ageIsPositive := That(func(age int) bool { return age > 0 })
userAgeIsPositive := Local(func(u User) int { return u.Age })(ageIsPositive)
// Test with a user who has zero age
user := User{Name: "Bob", Age: 0}
result := userAgeIsPositive(user)(mockT)
if result {
t.Error("Expected focused assertion to fail for zero age")
}
})
t.Run("should compose with other assertions", func(t *testing.T) {
// Create multiple focused assertions
nameNotEmpty := Local(func(u User) string { return u.Name })(
That(S.IsNonEmpty),
)
ageInRange := Local(func(u User) int { return u.Age })(
That(func(age int) bool { return age >= 18 && age <= 100 }),
)
user := User{Name: "Charlie", Age: 25}
assertions := AllOf([]Reader{
nameNotEmpty(user),
ageInRange(user),
})
result := assertions(t)
if !result {
t.Error("Expected composed focused assertions to pass")
}
})
t.Run("should work with Equal assertion", func(t *testing.T) {
// Focus Equal assertion on Name field
nameIsAlice := Local(func(u User) string { return u.Name })(Equal("Alice"))
user := User{Name: "Alice", Age: 30}
result := nameIsAlice(user)(t)
if !result {
t.Error("Expected focused Equal assertion to pass")
}
})
}
func TestLocalL(t *testing.T) {
// Note: LocalL requires lens package which provides lens operations.
// This test demonstrates the concept, but actual usage would require
// proper lens definitions.
t.Run("conceptual test for LocalL", func(t *testing.T) {
// LocalL is similar to Local but uses lenses for focusing.
// It would be used like:
// validEmail := That(func(email string) bool { return strings.Contains(email, "@") })
// validPersonEmail := LocalL(emailLens)(validEmail)
//
// The actual implementation would require lens definitions from the lens package.
// This test serves as documentation for the intended usage.
})
}
func TestFromOptional(t *testing.T) {
type DatabaseConfig struct {
Host string
Port int
}
type Config struct {
Database *DatabaseConfig
}
// Create an Optional that focuses on the Database field
dbOptional := Optional[Config, *DatabaseConfig]{
GetOption: func(c Config) option.Option[*DatabaseConfig] {
if c.Database != nil {
return option.Of(c.Database)
}
return option.None[*DatabaseConfig]()
},
Set: func(db *DatabaseConfig) func(Config) Config {
return func(c Config) Config {
c.Database = db
return c
}
},
}
t.Run("should pass when optional value is present", func(t *testing.T) {
config := Config{Database: &DatabaseConfig{Host: "localhost", Port: 5432}}
hasDatabaseConfig := FromOptional(dbOptional)
result := hasDatabaseConfig(config)(t)
if !result {
t.Error("Expected FromOptional to pass when optional value is present")
}
})
t.Run("should fail when optional value is absent", func(t *testing.T) {
mockT := &testing.T{}
emptyConfig := Config{Database: nil}
hasDatabaseConfig := FromOptional(dbOptional)
result := hasDatabaseConfig(emptyConfig)(mockT)
if result {
t.Error("Expected FromOptional to fail when optional value is absent")
}
})
t.Run("should work with nested optionals", func(t *testing.T) {
type AdvancedSettings struct {
Cache bool
}
type Settings struct {
Advanced *AdvancedSettings
}
advancedOptional := Optional[Settings, *AdvancedSettings]{
GetOption: func(s Settings) option.Option[*AdvancedSettings] {
if s.Advanced != nil {
return option.Of(s.Advanced)
}
return option.None[*AdvancedSettings]()
},
Set: func(adv *AdvancedSettings) func(Settings) Settings {
return func(s Settings) Settings {
s.Advanced = adv
return s
}
},
}
settings := Settings{Advanced: &AdvancedSettings{Cache: true}}
hasAdvanced := FromOptional(advancedOptional)
result := hasAdvanced(settings)(t)
if !result {
t.Error("Expected FromOptional to pass for nested optional")
}
})
}
// Helper types for Prism testing
type PrismTestResult interface {
isPrismTestResult()
}
type PrismTestSuccess struct {
Value int
}
type PrismTestFailure struct {
Error string
}
func (PrismTestSuccess) isPrismTestResult() {}
func (PrismTestFailure) isPrismTestResult() {}
func TestFromPrism(t *testing.T) {
// Create a Prism that focuses on Success variant using prism.MakePrism
successPrism := prism.MakePrism(
func(r PrismTestResult) option.Option[int] {
if s, ok := r.(PrismTestSuccess); ok {
return option.Of(s.Value)
}
return option.None[int]()
},
func(v int) PrismTestResult {
return PrismTestSuccess{Value: v}
},
)
// Create a Prism that focuses on Failure variant
failurePrism := prism.MakePrism(
func(r PrismTestResult) option.Option[string] {
if f, ok := r.(PrismTestFailure); ok {
return option.Of(f.Error)
}
return option.None[string]()
},
func(err string) PrismTestResult {
return PrismTestFailure{Error: err}
},
)
t.Run("should pass when prism successfully extracts", func(t *testing.T) {
result := PrismTestSuccess{Value: 42}
isSuccess := FromPrism(successPrism)
testResult := isSuccess(result)(t)
if !testResult {
t.Error("Expected FromPrism to pass when prism extracts successfully")
}
})
t.Run("should fail when prism cannot extract", func(t *testing.T) {
mockT := &testing.T{}
result := PrismTestFailure{Error: "something went wrong"}
isSuccess := FromPrism(successPrism)
testResult := isSuccess(result)(mockT)
if testResult {
t.Error("Expected FromPrism to fail when prism cannot extract")
}
})
t.Run("should work with failure prism", func(t *testing.T) {
result := PrismTestFailure{Error: "test error"}
isFailure := FromPrism(failurePrism)
testResult := isFailure(result)(t)
if !testResult {
t.Error("Expected FromPrism to pass for failure prism on failure result")
}
})
t.Run("should fail with failure prism on success result", func(t *testing.T) {
mockT := &testing.T{}
result := PrismTestSuccess{Value: 100}
isFailure := FromPrism(failurePrism)
testResult := isFailure(result)(mockT)
if testResult {
t.Error("Expected FromPrism to fail for failure prism on success result")
}
})
}

235
v2/assert/example_test.go Normal file

@@ -0,0 +1,235 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package assert_test
import (
"errors"
"strings"
"testing"
"github.com/IBM/fp-go/v2/assert"
"github.com/IBM/fp-go/v2/result"
)
// Example_basicAssertions demonstrates basic equality and inequality assertions
func Example_basicAssertions() {
// This would be in a real test function
var t *testing.T // placeholder for example
// Basic equality
value := 42
assert.Equal(42)(value)(t)
// String equality
name := "Alice"
assert.Equal("Alice")(name)(t)
// Inequality
assert.NotEqual(10)(value)(t)
}
// Example_arrayAssertions demonstrates array-related assertions
func Example_arrayAssertions() {
var t *testing.T // placeholder for example
numbers := []int{1, 2, 3, 4, 5}
// Check array is not empty
assert.ArrayNotEmpty(numbers)(t)
// Check array length
assert.ArrayLength[int](5)(numbers)(t)
// Check array contains a value
assert.ArrayContains(3)(numbers)(t)
}
// Example_mapAssertions demonstrates map-related assertions
func Example_mapAssertions() {
var t *testing.T // placeholder for example
config := map[string]int{
"timeout": 30,
"retries": 3,
"maxSize": 1000,
}
// Check map is not empty
assert.RecordNotEmpty(config)(t)
// Check map length
assert.RecordLength[string, int](3)(config)(t)
// Check map contains key
assert.ContainsKey[int]("timeout")(config)(t)
// Check map does not contain key
assert.NotContainsKey[int]("unknown")(config)(t)
}
// Example_errorAssertions demonstrates error-related assertions
func Example_errorAssertions() {
var t *testing.T // placeholder for example
// Assert no error
err := doSomethingSuccessful()
assert.NoError(err)(t)
// Assert error exists
err2 := doSomethingThatFails()
assert.Error(err2)(t)
}
// Example_resultAssertions demonstrates Result type assertions
func Example_resultAssertions() {
var t *testing.T // placeholder for example
// Assert success
successResult := result.Of[int](42)
assert.Success(successResult)(t)
// Assert failure
failureResult := result.Left[int](errors.New("something went wrong"))
assert.Failure(failureResult)(t)
}
// Example_predicateAssertions demonstrates custom predicate assertions
func Example_predicateAssertions() {
var t *testing.T // placeholder for example
// Test if a number is positive
isPositive := func(n int) bool { return n > 0 }
assert.That(isPositive)(42)(t)
// Test if a string is uppercase
isUppercase := func(s string) bool { return s == strings.ToUpper(s) }
assert.That(isUppercase)("HELLO")(t)
// Test if a number is even
isEven := func(n int) bool { return n%2 == 0 }
assert.That(isEven)(10)(t)
}
// Example_allOf demonstrates combining multiple assertions
func Example_allOf() {
var t *testing.T // placeholder for example
type User struct {
Name string
Age int
Active bool
}
user := User{Name: "Alice", Age: 30, Active: true}
// Combine multiple assertions
assertions := assert.AllOf([]assert.Reader{
assert.Equal("Alice")(user.Name),
assert.Equal(30)(user.Age),
assert.Equal(true)(user.Active),
})
assertions(t)
}
// Example_runAll demonstrates running named test cases
func Example_runAll() {
var t *testing.T // placeholder for example
testcases := map[string]assert.Reader{
"addition": assert.Equal(4)(2 + 2),
"multiplication": assert.Equal(6)(2 * 3),
"subtraction": assert.Equal(1)(3 - 2),
"division": assert.Equal(2)(10 / 5),
}
assert.RunAll(testcases)(t)
}
// Example_local demonstrates focusing assertions on specific properties
func Example_local() {
var t *testing.T // placeholder for example
type User struct {
Name string
Age int
}
// Create an assertion that checks if age is positive
ageIsPositive := assert.That(func(age int) bool { return age > 0 })
// Focus this assertion on the Age field of User
userAgeIsPositive := assert.Local(func(u User) int { return u.Age })(ageIsPositive)
// Now we can test the whole User object
user := User{Name: "Alice", Age: 30}
userAgeIsPositive(user)(t)
}
// Example_composableAssertions demonstrates building complex assertions
func Example_composableAssertions() {
var t *testing.T // placeholder for example
type Config struct {
Host string
Port int
Timeout int
Retries int
}
config := Config{
Host: "localhost",
Port: 8080,
Timeout: 30,
Retries: 3,
}
// Create focused assertions for each field
validHost := assert.Local(func(c Config) string { return c.Host })(
assert.StringNotEmpty,
)
validPort := assert.Local(func(c Config) int { return c.Port })(
assert.That(func(p int) bool { return p > 0 && p < 65536 }),
)
validTimeout := assert.Local(func(c Config) int { return c.Timeout })(
assert.That(func(t int) bool { return t > 0 }),
)
validRetries := assert.Local(func(c Config) int { return c.Retries })(
assert.That(func(r int) bool { return r >= 0 }),
)
// Combine all assertions
validConfig := assert.AllOf([]assert.Reader{
validHost(config),
validPort(config),
validTimeout(config),
validRetries(config),
})
validConfig(t)
}
// Helper functions for examples
func doSomethingSuccessful() error {
return nil
}
func doSomethingThatFails() error {
return errors.New("operation failed")
}


@@ -1,7 +1,22 @@
package assert
import "github.com/IBM/fp-go/v2/result"
import (
"testing"
"github.com/IBM/fp-go/v2/optics/lens"
"github.com/IBM/fp-go/v2/optics/optional"
"github.com/IBM/fp-go/v2/optics/prism"
"github.com/IBM/fp-go/v2/predicate"
"github.com/IBM/fp-go/v2/reader"
"github.com/IBM/fp-go/v2/result"
)
type (
Result[T any] = result.Result[T]
Reader = reader.Reader[*testing.T, bool]
Kleisli[T any] = reader.Reader[T, Reader]
Predicate[T any] = predicate.Predicate[T]
Lens[S, T any] = lens.Lens[S, T]
Optional[S, T any] = optional.Optional[S, T]
Prism[S, T any] = prism.Prism[S, T]
)
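// A small orientation sketch for these aliases (illustrative only; it assumes the
// Equal combinator defined in this package):
//
//	var check Kleisli[int] = Equal(42) // a function from the actual value to a Reader
//	var run Reader = check(42)         // a function from *testing.T to bool
//	_ = run                            // run(t) performs the underlying testify assertion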


@@ -8,5 +8,5 @@ import (
// BuilderPrism creates a [Prism] that converts between a builder and its type
func BuilderPrism[T any, B Builder[T]](creator func(T) B) Prism[B, T] {
return prism.MakePrism(F.Flow2(B.Build, result.ToOption[T]), creator)
return prism.MakePrismWithName(F.Flow2(B.Build, result.ToOption[T]), creator, "BuilderPrism")
}


@@ -27,12 +27,14 @@ import (
"strings"
"text/template"
S "github.com/IBM/fp-go/v2/string"
C "github.com/urfave/cli/v2"
)
const (
keyLensDir = "dir"
keyVerbose = "verbose"
keyIncludeTestFile = "include-test-files"
lensAnnotation = "fp-go:Lens"
)
@@ -49,6 +51,13 @@ var (
Value: false,
Usage: "Enable verbose output",
}
flagIncludeTestFiles = &C.BoolFlag{
Name: keyIncludeTestFile,
Aliases: []string{"t"},
Value: false,
Usage: "Include test files (*_test.go) when scanning for annotated types",
}
)
// structInfo holds information about a struct that needs lens generation
@@ -67,6 +76,7 @@ type fieldInfo struct {
BaseType string // TypeName without leading * for pointer types
IsOptional bool // true if field is a pointer or has json omitempty tag
IsComparable bool // true if the type is comparable (can use ==)
IsEmbedded bool // true if this field comes from an embedded struct
}
// templateData holds data for template rendering
@@ -80,12 +90,12 @@ const lensStructTemplate = `
type {{.Name}}Lenses{{.TypeParams}} struct {
// mandatory fields
{{- range .Fields}}
{{.Name}} L.Lens[{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{.Name}} __lens.Lens[{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{- end}}
// optional fields
{{- range .Fields}}
{{- if .IsComparable}}
{{.Name}}O LO.LensO[{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{.Name}}O __lens_option.LensO[{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{- end}}
{{- end}}
}
@@ -94,13 +104,24 @@ type {{.Name}}Lenses{{.TypeParams}} struct {
type {{.Name}}RefLenses{{.TypeParams}} struct {
// mandatory fields
{{- range .Fields}}
{{.Name}} L.Lens[*{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{.Name}} __lens.Lens[*{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{- end}}
// optional fields
{{- range .Fields}}
{{- if .IsComparable}}
{{.Name}}O LO.LensO[*{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{.Name}}O __lens_option.LensO[*{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{- end}}
{{- end}}
// prisms
{{- range .Fields}}
{{.Name}}P __prism.Prism[*{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{- end}}
}
// {{.Name}}Prisms provides prisms for accessing fields of {{.Name}}
type {{.Name}}Prisms{{.TypeParams}} struct {
{{- range .Fields}}
{{.Name}} __prism.Prism[{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{- end}}
}
`
@@ -110,15 +131,16 @@ const lensConstructorTemplate = `
func Make{{.Name}}Lenses{{.TypeParams}}() {{.Name}}Lenses{{.TypeParamNames}} {
// mandatory lenses
{{- range .Fields}}
lens{{.Name}} := L.MakeLens(
lens{{.Name}} := __lens.MakeLensWithName(
func(s {{$.Name}}{{$.TypeParamNames}}) {{.TypeName}} { return s.{{.Name}} },
func(s {{$.Name}}{{$.TypeParamNames}}, v {{.TypeName}}) {{$.Name}}{{$.TypeParamNames}} { s.{{.Name}} = v; return s },
"{{$.Name}}{{$.TypeParamNames}}.{{.Name}}",
)
{{- end}}
// optional lenses
{{- range .Fields}}
{{- if .IsComparable}}
lens{{.Name}}O := LO.FromIso[{{$.Name}}{{$.TypeParamNames}}](IO.FromZero[{{.TypeName}}]())(lens{{.Name}})
lens{{.Name}}O := __lens_option.FromIso[{{$.Name}}{{$.TypeParamNames}}](__iso_option.FromZero[{{.TypeName}}]())(lens{{.Name}})
{{- end}}
{{- end}}
return {{.Name}}Lenses{{.TypeParamNames}}{
@@ -140,21 +162,23 @@ func Make{{.Name}}RefLenses{{.TypeParams}}() {{.Name}}RefLenses{{.TypeParamNames
// mandatory lenses
{{- range .Fields}}
{{- if .IsComparable}}
lens{{.Name}} := L.MakeLensStrict(
lens{{.Name}} := __lens.MakeLensStrictWithName(
func(s *{{$.Name}}{{$.TypeParamNames}}) {{.TypeName}} { return s.{{.Name}} },
func(s *{{$.Name}}{{$.TypeParamNames}}, v {{.TypeName}}) *{{$.Name}}{{$.TypeParamNames}} { s.{{.Name}} = v; return s },
"(*{{$.Name}}{{$.TypeParamNames}}).{{.Name}}",
)
{{- else}}
lens{{.Name}} := L.MakeLensRef(
lens{{.Name}} := __lens.MakeLensRefWithName(
func(s *{{$.Name}}{{$.TypeParamNames}}) {{.TypeName}} { return s.{{.Name}} },
func(s *{{$.Name}}{{$.TypeParamNames}}, v {{.TypeName}}) *{{$.Name}}{{$.TypeParamNames}} { s.{{.Name}} = v; return s },
"(*{{$.Name}}{{$.TypeParamNames}}).{{.Name}}",
)
{{- end}}
{{- end}}
// optional lenses
{{- range .Fields}}
{{- if .IsComparable}}
lens{{.Name}}O := LO.FromIso[*{{$.Name}}{{$.TypeParamNames}}](IO.FromZero[{{.TypeName}}]())(lens{{.Name}})
lens{{.Name}}O := __lens_option.FromIso[*{{$.Name}}{{$.TypeParamNames}}](__iso_option.FromZero[{{.TypeName}}]())(lens{{.Name}})
{{- end}}
{{- end}}
return {{.Name}}RefLenses{{.TypeParamNames}}{
@@ -170,6 +194,47 @@ func Make{{.Name}}RefLenses{{.TypeParams}}() {{.Name}}RefLenses{{.TypeParamNames
{{- end}}
}
}
// Make{{.Name}}Prisms creates a new {{.Name}}Prisms with prisms for all fields
func Make{{.Name}}Prisms{{.TypeParams}}() {{.Name}}Prisms{{.TypeParamNames}} {
{{- range .Fields}}
{{- if .IsComparable}}
_fromNonZero{{.Name}} := __option.FromNonZero[{{.TypeName}}]()
_prism{{.Name}} := __prism.MakePrismWithName(
func(s {{$.Name}}{{$.TypeParamNames}}) __option.Option[{{.TypeName}}] { return _fromNonZero{{.Name}}(s.{{.Name}}) },
func(v {{.TypeName}}) {{$.Name}}{{$.TypeParamNames}} {
{{- if .IsEmbedded}}
var result {{$.Name}}{{$.TypeParamNames}}
result.{{.Name}} = v
return result
{{- else}}
return {{$.Name}}{{$.TypeParamNames}}{ {{.Name}}: v }
{{- end}}
},
"{{$.Name}}{{$.TypeParamNames}}.{{.Name}}",
)
{{- else}}
_prism{{.Name}} := __prism.MakePrismWithName(
func(s {{$.Name}}{{$.TypeParamNames}}) __option.Option[{{.TypeName}}] { return __option.Some(s.{{.Name}}) },
func(v {{.TypeName}}) {{$.Name}}{{$.TypeParamNames}} {
{{- if .IsEmbedded}}
var result {{$.Name}}{{$.TypeParamNames}}
result.{{.Name}} = v
return result
{{- else}}
return {{$.Name}}{{$.TypeParamNames}}{ {{.Name}}: v }
{{- end}}
},
"{{$.Name}}{{$.TypeParamNames}}.{{.Name}}",
)
{{- end}}
{{- end}}
return {{.Name}}Prisms{{.TypeParamNames}} {
{{- range .Fields}}
{{.Name}}: _prism{{.Name}},
{{- end}}
}
}
`
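To make the new prism output concrete, here is a hand-expanded sketch of what the templates above would produce for a hypothetical struct with one comparable and one non-comparable field. The Person type is illustrative only; __prism and __option refer to the aliased imports that the generator writes into the file header (shown further down in this diff). This is a manual expansion for orientation, not actual generator output.

// Hypothetical annotated input (illustrative only):
//
//	type Person struct {
//	    Name string   // comparable
//	    Tags []string // not comparable
//	}
//
// Hand-expanded from the templates above (sketch):
type PersonPrisms struct {
	Name __prism.Prism[Person, string]
	Tags __prism.Prism[Person, []string]
}

// MakePersonPrisms creates a new PersonPrisms with prisms for all fields
func MakePersonPrisms() PersonPrisms {
	// comparable field: match only non-zero values
	_fromNonZeroName := __option.FromNonZero[string]()
	_prismName := __prism.MakePrismWithName(
		func(s Person) __option.Option[string] { return _fromNonZeroName(s.Name) },
		func(v string) Person { return Person{Name: v} },
		"Person.Name",
	)
	// non-comparable field: always matches
	_prismTags := __prism.MakePrismWithName(
		func(s Person) __option.Option[[]string] { return __option.Some(s.Tags) },
		func(v []string) Person { return Person{Tags: v} },
		"Person.Tags",
	)
	return PersonPrisms{
		Name: _prismName,
		Tags: _prismTags,
	}
}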
var (
@@ -439,7 +504,7 @@ func extractEmbeddedFields(embedType ast.Expr, fileImports map[string]string, fi
return results
}
if typeName == "" || typeIdent == nil {
if S.IsEmpty(typeName) || typeIdent == nil {
return results
}
@@ -494,6 +559,7 @@ func extractEmbeddedFields(embedType ast.Expr, fileImports map[string]string, fi
BaseType: baseType,
IsOptional: isOptional,
IsComparable: isComparable,
IsEmbedded: true,
},
fieldType: field.Type,
})
@@ -695,7 +761,7 @@ func parseFile(filename string) ([]structInfo, string, error) {
}
// generateLensHelpers scans a directory for Go files and generates lens code
func generateLensHelpers(dir, filename string, verbose bool) error {
func generateLensHelpers(dir, filename string, verbose, includeTestFiles bool) error {
// Get absolute path
absDir, err := filepath.Abs(dir)
if err != nil {
@@ -716,21 +782,34 @@ func generateLensHelpers(dir, filename string, verbose bool) error {
log.Printf("Found %d Go files", len(files))
}
// Parse all files and collect structs
var allStructs []structInfo
// Parse all files and collect structs, separating test and non-test files
var regularStructs []structInfo
var testStructs []structInfo
var packageName string
for _, file := range files {
// Skip generated files and test files
if strings.HasSuffix(file, "_test.go") || strings.Contains(file, "gen.go") {
baseName := filepath.Base(file)
// Skip generated lens files (both regular and test)
if strings.HasPrefix(baseName, "gen_lens") && strings.HasSuffix(baseName, ".go") {
if verbose {
log.Printf("Skipping file: %s", filepath.Base(file))
log.Printf("Skipping generated lens file: %s", baseName)
}
continue
}
isTestFile := strings.HasSuffix(file, "_test.go")
// Skip test files unless includeTestFiles is true
if isTestFile && !includeTestFiles {
if verbose {
log.Printf("Skipping test file: %s", baseName)
}
continue
}
if verbose {
log.Printf("Parsing file: %s", filepath.Base(file))
log.Printf("Parsing file: %s", baseName)
}
structs, pkg, err := parseFile(file)
@@ -740,27 +819,52 @@ func generateLensHelpers(dir, filename string, verbose bool) error {
}
if verbose && len(structs) > 0 {
log.Printf("Found %d annotated struct(s) in %s", len(structs), filepath.Base(file))
log.Printf("Found %d annotated struct(s) in %s", len(structs), baseName)
for _, s := range structs {
log.Printf(" - %s (%d fields)", s.Name, len(s.Fields))
}
}
if packageName == "" {
if S.IsEmpty(packageName) {
packageName = pkg
}
allStructs = append(allStructs, structs...)
// Separate structs based on source file type
if isTestFile {
testStructs = append(testStructs, structs...)
} else {
regularStructs = append(regularStructs, structs...)
}
}
if len(allStructs) == 0 {
if len(regularStructs) == 0 && len(testStructs) == 0 {
log.Printf("No structs with %s annotation found in %s", lensAnnotation, absDir)
return nil
}
// Generate regular lens file if there are regular structs
if len(regularStructs) > 0 {
if err := generateLensFile(absDir, filename, packageName, regularStructs, verbose); err != nil {
return err
}
}
// Generate test lens file if there are test structs
if len(testStructs) > 0 {
testFilename := strings.TrimSuffix(filename, ".go") + "_test.go"
if err := generateLensFile(absDir, testFilename, packageName, testStructs, verbose); err != nil {
return err
}
}
return nil
}
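For illustration, calling the generator with the new flag over a package that contains annotated structs both in regular sources and in _test.go sources now produces two output files; the directory and base name below are hypothetical.

// Illustrative fragment (paths are hypothetical):
err := generateLensHelpers("./mypkg", "gen_lens.go", true /* verbose */, true /* includeTestFiles */)
if err != nil {
	log.Fatal(err)
}
// -> ./mypkg/gen_lens.go       generated from structs in regular files
// -> ./mypkg/gen_lens_test.go  generated from structs in _test.go files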
// generateLensFile generates a lens file for the given structs
func generateLensFile(absDir, filename, packageName string, structs []structInfo, verbose bool) error {
// Collect all unique imports from all structs
allImports := make(map[string]string) // import path -> alias
for _, s := range allStructs {
for _, s := range structs {
for importPath, alias := range s.Imports {
allImports[importPath] = alias
}
@@ -774,7 +878,7 @@ func generateLensHelpers(dir, filename string, verbose bool) error {
}
defer f.Close()
log.Printf("Generating lens code in [%s] for package [%s] with [%d] structs ...", outPath, packageName, len(allStructs))
log.Printf("Generating lens code in [%s] for package [%s] with [%d] structs ...", outPath, packageName, len(structs))
// Write header
writePackage(f, packageName)
@@ -782,10 +886,11 @@ func generateLensHelpers(dir, filename string, verbose bool) error {
// Write imports
f.WriteString("import (\n")
// Standard fp-go imports always needed
f.WriteString("\tL \"github.com/IBM/fp-go/v2/optics/lens\"\n")
f.WriteString("\tLO \"github.com/IBM/fp-go/v2/optics/lens/option\"\n")
// f.WriteString("\tO \"github.com/IBM/fp-go/v2/option\"\n")
f.WriteString("\tIO \"github.com/IBM/fp-go/v2/optics/iso/option\"\n")
f.WriteString("\t__lens \"github.com/IBM/fp-go/v2/optics/lens\"\n")
f.WriteString("\t__option \"github.com/IBM/fp-go/v2/option\"\n")
f.WriteString("\t__prism \"github.com/IBM/fp-go/v2/optics/prism\"\n")
f.WriteString("\t__lens_option \"github.com/IBM/fp-go/v2/optics/lens/option\"\n")
f.WriteString("\t__iso_option \"github.com/IBM/fp-go/v2/optics/iso/option\"\n")
// Add additional imports collected from field types
for importPath, alias := range allImports {
@@ -795,7 +900,7 @@ func generateLensHelpers(dir, filename string, verbose bool) error {
f.WriteString(")\n")
// Generate lens code for each struct using templates
for _, s := range allStructs {
for _, s := range structs {
var buf bytes.Buffer
// Generate struct type
@@ -827,12 +932,14 @@ func LensCommand() *C.Command {
flagLensDir,
flagFilename,
flagVerbose,
flagIncludeTestFiles,
},
Action: func(ctx *C.Context) error {
return generateLensHelpers(
ctx.String(keyLensDir),
ctx.String(keyFilename),
ctx.Bool(keyVerbose),
ctx.Bool(keyIncludeTestFile),
)
},
}

View File

@@ -25,6 +25,7 @@ import (
"strings"
"testing"
S "github.com/IBM/fp-go/v2/string"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -60,7 +61,7 @@ func TestHasLensAnnotation(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var doc *ast.CommentGroup
if tt.comment != "" {
if S.IsNonEmpty(tt.comment) {
doc = &ast.CommentGroup{
List: []*ast.Comment{
{Text: tt.comment},
@@ -289,7 +290,7 @@ func TestHasOmitEmpty(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var tag *ast.BasicLit
if tt.tag != "" {
if S.IsNonEmpty(tt.tag) {
tag = &ast.BasicLit{
Value: tt.tag,
}
@@ -326,7 +327,7 @@ type Other struct {
}
`
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Parse the file
@@ -380,7 +381,7 @@ type Config struct {
}
`
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Parse the file
@@ -440,7 +441,7 @@ type TypeTest struct {
}
`
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Parse the file
@@ -514,16 +515,16 @@ func TestLensRefTemplatesWithComparable(t *testing.T) {
assert.Contains(t, constructorStr, "func MakeTestStructRefLenses() TestStructRefLenses")
// Name field - comparable, should use MakeLensStrict
assert.Contains(t, constructorStr, "lensName := L.MakeLensStrict(",
"comparable field Name should use MakeLensStrict in RefLenses")
assert.Contains(t, constructorStr, "lensName := __lens.MakeLensStrictWithName(",
"comparable field Name should use MakeLensStrictWithName in RefLenses")
// Age field - comparable, should use MakeLensStrict
assert.Contains(t, constructorStr, "lensAge := L.MakeLensStrict(",
"comparable field Age should use MakeLensStrict in RefLenses")
assert.Contains(t, constructorStr, "lensAge := __lens.MakeLensStrictWithName(",
"comparable field Age should use MakeLensStrictWithName in RefLenses")
// Data field - not comparable, should use MakeLensRef
assert.Contains(t, constructorStr, "lensData := L.MakeLensRef(",
"non-comparable field Data should use MakeLensRef in RefLenses")
assert.Contains(t, constructorStr, "lensData := __lens.MakeLensRefWithName(",
"non-comparable field Data should use MakeLensRefWithName in RefLenses")
}
@@ -542,12 +543,12 @@ type TestStruct struct {
`
testFile := filepath.Join(tmpDir, "test.go")
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Generate lens code
outputFile := "gen.go"
err = generateLensHelpers(tmpDir, outputFile, false)
err = generateLensHelpers(tmpDir, outputFile, false, false)
require.NoError(t, err)
// Verify the generated file exists
@@ -564,23 +565,23 @@ type TestStruct struct {
// Check for expected content in RefLenses
assert.Contains(t, contentStr, "MakeTestStructRefLenses")
// Name and Count are comparable, should use MakeLensStrict
assert.Contains(t, contentStr, "L.MakeLensStrict",
"comparable fields should use MakeLensStrict in RefLenses")
// Name and Count are comparable, should use MakeLensStrictWithName
assert.Contains(t, contentStr, "__lens.MakeLensStrictWithName",
"comparable fields should use MakeLensStrictWithName in RefLenses")
// Data is not comparable (slice), should use MakeLensRef
assert.Contains(t, contentStr, "L.MakeLensRef",
"non-comparable fields should use MakeLensRef in RefLenses")
// Data is not comparable (slice), should use MakeLensRefWithName
assert.Contains(t, contentStr, "__lens.MakeLensRefWithName",
"non-comparable fields should use MakeLensRefWithName in RefLenses")
// Verify the pattern appears for Name field (comparable)
namePattern := "lensName := L.MakeLensStrict("
namePattern := "lensName := __lens.MakeLensStrictWithName("
assert.Contains(t, contentStr, namePattern,
"Name field should use MakeLensStrict")
"Name field should use MakeLensStrictWithName")
// Verify the pattern appears for Data field (not comparable)
dataPattern := "lensData := L.MakeLensRef("
dataPattern := "lensData := __lens.MakeLensRefWithName("
assert.Contains(t, contentStr, dataPattern,
"Data field should use MakeLensRef")
"Data field should use MakeLensRefWithName")
}
func TestGenerateLensHelpers(t *testing.T) {
@@ -597,12 +598,12 @@ type TestStruct struct {
`
testFile := filepath.Join(tmpDir, "test.go")
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Generate lens code
outputFile := "gen.go"
err = generateLensHelpers(tmpDir, outputFile, false)
err = generateLensHelpers(tmpDir, outputFile, false, false)
require.NoError(t, err)
// Verify the generated file exists
@@ -621,9 +622,9 @@ type TestStruct struct {
assert.Contains(t, contentStr, "Code generated by go generate")
assert.Contains(t, contentStr, "TestStructLenses")
assert.Contains(t, contentStr, "MakeTestStructLenses")
assert.Contains(t, contentStr, "L.Lens[TestStruct, string]")
assert.Contains(t, contentStr, "LO.LensO[TestStruct, *int]")
assert.Contains(t, contentStr, "IO.FromZero")
assert.Contains(t, contentStr, "__lens.Lens[TestStruct, string]")
assert.Contains(t, contentStr, "__lens_option.LensO[TestStruct, *int]")
assert.Contains(t, contentStr, "__iso_option.FromZero")
}
func TestGenerateLensHelpersNoAnnotations(t *testing.T) {
@@ -639,12 +640,12 @@ type TestStruct struct {
`
testFile := filepath.Join(tmpDir, "test.go")
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Generate lens code (should not create file)
outputFile := "gen.go"
err = generateLensHelpers(tmpDir, outputFile, false)
err = generateLensHelpers(tmpDir, outputFile, false, false)
require.NoError(t, err)
// Verify the generated file does not exist
@@ -669,10 +670,10 @@ func TestLensTemplates(t *testing.T) {
structStr := structBuf.String()
assert.Contains(t, structStr, "type TestStructLenses struct")
assert.Contains(t, structStr, "Name L.Lens[TestStruct, string]")
assert.Contains(t, structStr, "NameO LO.LensO[TestStruct, string]")
assert.Contains(t, structStr, "Value L.Lens[TestStruct, *int]")
assert.Contains(t, structStr, "ValueO LO.LensO[TestStruct, *int]")
assert.Contains(t, structStr, "Name __lens.Lens[TestStruct, string]")
assert.Contains(t, structStr, "NameO __lens_option.LensO[TestStruct, string]")
assert.Contains(t, structStr, "Value __lens.Lens[TestStruct, *int]")
assert.Contains(t, structStr, "ValueO __lens_option.LensO[TestStruct, *int]")
// Test constructor template
var constructorBuf bytes.Buffer
@@ -686,7 +687,7 @@ func TestLensTemplates(t *testing.T) {
assert.Contains(t, constructorStr, "NameO: lensNameO,")
assert.Contains(t, constructorStr, "Value: lensValue,")
assert.Contains(t, constructorStr, "ValueO: lensValueO,")
assert.Contains(t, constructorStr, "IO.FromZero")
assert.Contains(t, constructorStr, "__iso_option.FromZero")
}
func TestLensTemplatesWithOmitEmpty(t *testing.T) {
@@ -707,14 +708,14 @@ func TestLensTemplatesWithOmitEmpty(t *testing.T) {
structStr := structBuf.String()
assert.Contains(t, structStr, "type ConfigStructLenses struct")
assert.Contains(t, structStr, "Name L.Lens[ConfigStruct, string]")
assert.Contains(t, structStr, "NameO LO.LensO[ConfigStruct, string]")
assert.Contains(t, structStr, "Value L.Lens[ConfigStruct, string]")
assert.Contains(t, structStr, "ValueO LO.LensO[ConfigStruct, string]", "comparable non-pointer with omitempty should have optional lens")
assert.Contains(t, structStr, "Count L.Lens[ConfigStruct, int]")
assert.Contains(t, structStr, "CountO LO.LensO[ConfigStruct, int]", "comparable non-pointer with omitempty should have optional lens")
assert.Contains(t, structStr, "Pointer L.Lens[ConfigStruct, *string]")
assert.Contains(t, structStr, "PointerO LO.LensO[ConfigStruct, *string]")
assert.Contains(t, structStr, "Name __lens.Lens[ConfigStruct, string]")
assert.Contains(t, structStr, "NameO __lens_option.LensO[ConfigStruct, string]")
assert.Contains(t, structStr, "Value __lens.Lens[ConfigStruct, string]")
assert.Contains(t, structStr, "ValueO __lens_option.LensO[ConfigStruct, string]", "comparable non-pointer with omitempty should have optional lens")
assert.Contains(t, structStr, "Count __lens.Lens[ConfigStruct, int]")
assert.Contains(t, structStr, "CountO __lens_option.LensO[ConfigStruct, int]", "comparable non-pointer with omitempty should have optional lens")
assert.Contains(t, structStr, "Pointer __lens.Lens[ConfigStruct, *string]")
assert.Contains(t, structStr, "PointerO __lens_option.LensO[ConfigStruct, *string]")
// Test constructor template
var constructorBuf bytes.Buffer
@@ -723,9 +724,9 @@ func TestLensTemplatesWithOmitEmpty(t *testing.T) {
constructorStr := constructorBuf.String()
assert.Contains(t, constructorStr, "func MakeConfigStructLenses() ConfigStructLenses")
assert.Contains(t, constructorStr, "IO.FromZero[string]()")
assert.Contains(t, constructorStr, "IO.FromZero[int]()")
assert.Contains(t, constructorStr, "IO.FromZero[*string]()")
assert.Contains(t, constructorStr, "__iso_option.FromZero[string]()")
assert.Contains(t, constructorStr, "__iso_option.FromZero[int]()")
assert.Contains(t, constructorStr, "__iso_option.FromZero[*string]()")
}
func TestLensCommandFlags(t *testing.T) {
@@ -737,9 +738,9 @@ func TestLensCommandFlags(t *testing.T) {
assert.Contains(t, strings.ToLower(cmd.Description), "lenso", "Description should mention LensO for optional lenses")
// Check flags
assert.Len(t, cmd.Flags, 3)
assert.Len(t, cmd.Flags, 4)
var hasDir, hasFilename, hasVerbose bool
var hasDir, hasFilename, hasVerbose, hasIncludeTestFiles bool
for _, flag := range cmd.Flags {
switch flag.Names()[0] {
case "dir":
@@ -748,12 +749,15 @@ func TestLensCommandFlags(t *testing.T) {
hasFilename = true
case "verbose":
hasVerbose = true
case "include-test-files":
hasIncludeTestFiles = true
}
}
assert.True(t, hasDir, "should have dir flag")
assert.True(t, hasFilename, "should have filename flag")
assert.True(t, hasVerbose, "should have verbose flag")
assert.True(t, hasIncludeTestFiles, "should have include-test-files flag")
}
func TestParseFileWithEmbeddedStruct(t *testing.T) {
@@ -776,7 +780,7 @@ type Extended struct {
}
`
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Parse the file
@@ -824,12 +828,12 @@ type Person struct {
`
testFile := filepath.Join(tmpDir, "test.go")
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Generate lens code
outputFile := "gen.go"
err = generateLensHelpers(tmpDir, outputFile, false)
err = generateLensHelpers(tmpDir, outputFile, false, false)
require.NoError(t, err)
// Verify the generated file exists
@@ -849,14 +853,14 @@ type Person struct {
assert.Contains(t, contentStr, "MakePersonLenses")
// Check that embedded fields are included
assert.Contains(t, contentStr, "Street L.Lens[Person, string]", "Should have lens for embedded Street field")
assert.Contains(t, contentStr, "City L.Lens[Person, string]", "Should have lens for embedded City field")
assert.Contains(t, contentStr, "Name L.Lens[Person, string]", "Should have lens for Name field")
assert.Contains(t, contentStr, "Age L.Lens[Person, int]", "Should have lens for Age field")
assert.Contains(t, contentStr, "Street __lens.Lens[Person, string]", "Should have lens for embedded Street field")
assert.Contains(t, contentStr, "City __lens.Lens[Person, string]", "Should have lens for embedded City field")
assert.Contains(t, contentStr, "Name __lens.Lens[Person, string]", "Should have lens for Name field")
assert.Contains(t, contentStr, "Age __lens.Lens[Person, int]", "Should have lens for Age field")
// Check that optional lenses are also generated for embedded fields
assert.Contains(t, contentStr, "StreetO LO.LensO[Person, string]")
assert.Contains(t, contentStr, "CityO LO.LensO[Person, string]")
assert.Contains(t, contentStr, "StreetO __lens_option.LensO[Person, string]")
assert.Contains(t, contentStr, "CityO __lens_option.LensO[Person, string]")
}
func TestParseFileWithPointerEmbeddedStruct(t *testing.T) {
@@ -880,7 +884,7 @@ type Document struct {
}
`
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Parse the file
@@ -922,7 +926,7 @@ type Container[T any] struct {
}
`
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Parse the file
@@ -960,7 +964,7 @@ type Pair[K comparable, V any] struct {
}
`
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Parse the file
@@ -998,12 +1002,12 @@ type Box[T any] struct {
`
testFile := filepath.Join(tmpDir, "test.go")
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Generate lens code
outputFile := "gen.go"
err = generateLensHelpers(tmpDir, outputFile, false)
err = generateLensHelpers(tmpDir, outputFile, false, false)
require.NoError(t, err)
// Verify the generated file exists
@@ -1025,14 +1029,14 @@ type Box[T any] struct {
assert.Contains(t, contentStr, "func MakeBoxRefLenses[T any]() BoxRefLenses[T]", "Should have generic ref constructor")
// Check that fields use the generic type parameter
assert.Contains(t, contentStr, "Content L.Lens[Box[T], T]", "Should have lens for generic Content field")
assert.Contains(t, contentStr, "Label L.Lens[Box[T], string]", "Should have lens for Label field")
assert.Contains(t, contentStr, "Content __lens.Lens[Box[T], T]", "Should have lens for generic Content field")
assert.Contains(t, contentStr, "Label __lens.Lens[Box[T], string]", "Should have lens for Label field")
// Check optional lenses - only for comparable types
// T any is not comparable, so ContentO should NOT be generated
assert.NotContains(t, contentStr, "ContentO LO.LensO[Box[T], T]", "T any is not comparable, should not have optional lens")
assert.NotContains(t, contentStr, "ContentO __lens_option.LensO[Box[T], T]", "T any is not comparable, should not have optional lens")
// string is comparable, so LabelO should be generated
assert.Contains(t, contentStr, "LabelO LO.LensO[Box[T], string]", "string is comparable, should have optional lens")
assert.Contains(t, contentStr, "LabelO __lens_option.LensO[Box[T], string]", "string is comparable, should have optional lens")
}
func TestGenerateLensHelpersWithComparableTypeParam(t *testing.T) {
@@ -1049,12 +1053,12 @@ type ComparableBox[T comparable] struct {
`
testFile := filepath.Join(tmpDir, "test.go")
err := os.WriteFile(testFile, []byte(testCode), 0644)
err := os.WriteFile(testFile, []byte(testCode), 0o644)
require.NoError(t, err)
// Generate lens code
outputFile := "gen.go"
err = generateLensHelpers(tmpDir, outputFile, false)
err = generateLensHelpers(tmpDir, outputFile, false, false)
require.NoError(t, err)
// Verify the generated file exists
@@ -1074,11 +1078,11 @@ type ComparableBox[T comparable] struct {
assert.Contains(t, contentStr, "type ComparableBoxRefLenses[T comparable] struct", "Should have generic ComparableBoxRefLenses type")
// Check that Key field (with comparable constraint) uses MakeLensStrict in RefLenses
assert.Contains(t, contentStr, "lensKey := L.MakeLensStrict(", "Key field with comparable constraint should use MakeLensStrict")
assert.Contains(t, contentStr, "lensKey := __lens.MakeLensStrictWithName(", "Key field with comparable constraint should use MakeLensStrictWithName")
// Check that Value field (string, always comparable) also uses MakeLensStrict
assert.Contains(t, contentStr, "lensValue := L.MakeLensStrict(", "Value field (string) should use MakeLensStrict")
assert.Contains(t, contentStr, "lensValue := __lens.MakeLensStrictWithName(", "Value field (string) should use MakeLensStrictWithName")
// Verify that MakeLensRef is NOT used (since both fields are comparable)
assert.NotContains(t, contentStr, "L.MakeLensRef(", "Should not use MakeLensRef when all fields are comparable")
assert.NotContains(t, contentStr, "__lens.MakeLensRefWithName(", "Should not use MakeLensRefWithName when all fields are comparable")
}

View File

@@ -19,6 +19,8 @@ import (
"fmt"
"os"
"strings"
S "github.com/IBM/fp-go/v2/string"
)
// Deprecated:
@@ -176,7 +178,7 @@ func generateTraverseTuple1(
}
fmt.Fprintf(f, "F%d ~func(A%d) %s", j+1, j+1, hkt(fmt.Sprintf("T%d", j+1)))
}
if infix != "" {
if S.IsNonEmpty(infix) {
fmt.Fprintf(f, ", %s", infix)
}
// types
@@ -209,7 +211,7 @@ func generateTraverseTuple1(
fmt.Fprintf(f, " return A.TraverseTuple%d(\n", i)
// map
fmt.Fprintf(f, " Map[")
if infix != "" {
if S.IsNonEmpty(infix) {
fmt.Fprintf(f, "%s, T1,", infix)
} else {
fmt.Fprintf(f, "T1,")
@@ -231,7 +233,7 @@ func generateTraverseTuple1(
fmt.Fprintf(f, " ")
}
fmt.Fprintf(f, "%s", tuple)
if infix != "" {
if S.IsNonEmpty(infix) {
fmt.Fprintf(f, ", %s", infix)
}
fmt.Fprintf(f, ", T%d],\n", j+1)
@@ -256,11 +258,11 @@ func generateSequenceTuple1(
fmt.Fprintf(f, "\n// SequenceTuple%d converts a [Tuple%d] of [%s] into an [%s].\n", i, i, hkt("T"), hkt(fmt.Sprintf("Tuple%d", i)))
fmt.Fprintf(f, "func SequenceTuple%d[", i)
if infix != "" {
if S.IsNonEmpty(infix) {
fmt.Fprintf(f, "%s", infix)
}
for j := 0; j < i; j++ {
if infix != "" || j > 0 {
if S.IsNonEmpty(infix) || j > 0 {
fmt.Fprintf(f, ", ")
}
fmt.Fprintf(f, "T%d", j+1)
@@ -276,7 +278,7 @@ func generateSequenceTuple1(
fmt.Fprintf(f, " return A.SequenceTuple%d(\n", i)
// map
fmt.Fprintf(f, " Map[")
if infix != "" {
if S.IsNonEmpty(infix) {
fmt.Fprintf(f, "%s, T1,", infix)
} else {
fmt.Fprintf(f, "T1,")
@@ -298,7 +300,7 @@ func generateSequenceTuple1(
fmt.Fprintf(f, " ")
}
fmt.Fprintf(f, "%s", tuple)
if infix != "" {
if S.IsNonEmpty(infix) {
fmt.Fprintf(f, ", %s", infix)
}
fmt.Fprintf(f, ", T%d],\n", j+1)
@@ -319,11 +321,11 @@ func generateSequenceT1(
fmt.Fprintf(f, "\n// SequenceT%d converts %d parameters of [%s] into a [%s].\n", i, i, hkt("T"), hkt(fmt.Sprintf("Tuple%d", i)))
fmt.Fprintf(f, "func SequenceT%d[", i)
if infix != "" {
if S.IsNonEmpty(infix) {
fmt.Fprintf(f, "%s", infix)
}
for j := 0; j < i; j++ {
if infix != "" || j > 0 {
if S.IsNonEmpty(infix) || j > 0 {
fmt.Fprintf(f, ", ")
}
fmt.Fprintf(f, "T%d", j+1)
@@ -339,7 +341,7 @@ func generateSequenceT1(
fmt.Fprintf(f, " return A.SequenceT%d(\n", i)
// map
fmt.Fprintf(f, " Map[")
if infix != "" {
if S.IsNonEmpty(infix) {
fmt.Fprintf(f, "%s, T1,", infix)
} else {
fmt.Fprintf(f, "T1,")
@@ -361,7 +363,7 @@ func generateSequenceT1(
fmt.Fprintf(f, " ")
}
fmt.Fprintf(f, "%s", tuple)
if infix != "" {
if S.IsNonEmpty(infix) {
fmt.Fprintf(f, ", %s", infix)
}
fmt.Fprintf(f, ", T%d],\n", j+1)

v2/consumer/consumer.go
v2/consumer/consumer.go
View File

@@ -0,0 +1,177 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package consumer
// Local transforms a Consumer by preprocessing its input through a function.
// This is the contravariant map operation for Consumers, analogous to reader.Local
// but operating on the input side rather than the output side.
//
// Given a Consumer[R1] that consumes values of type R1, and a function f that
// converts R2 to R1, Local creates a new Consumer[R2] that:
// 1. Takes a value of type R2
// 2. Applies f to convert it to R1
// 3. Passes the result to the original Consumer[R1]
//
// This is particularly useful for adapting consumers to work with different input types,
// similar to how reader.Local adapts readers to work with different environment types.
//
// Comparison with reader.Local:
// - reader.Local: Transforms the environment BEFORE passing it to a Reader (preprocessing input)
// - consumer.Local: Transforms the value BEFORE passing it to a Consumer (preprocessing input)
// - Both are contravariant operations on the input type
// - Reader produces output, Consumer performs side effects
//
// Type Parameters:
// - R2: The input type of the new Consumer (what you have)
// - R1: The input type of the original Consumer (what it expects)
//
// Parameters:
// - f: A function that converts R2 to R1 (preprocessing function)
//
// Returns:
// - An Operator that transforms Consumer[R1] into Consumer[R2]
//
// Example - Basic type adaptation:
//
// // Consumer that logs integers
// logInt := func(x int) {
// fmt.Printf("Value: %d\n", x)
// }
//
// // Adapt it to consume strings by parsing them first
// parseToInt := func(s string) int {
// n, _ := strconv.Atoi(s)
// return n
// }
//
// logString := consumer.Local(parseToInt)(logInt)
// logString("42") // Logs: "Value: 42"
//
// Example - Extracting fields from structs:
//
// type User struct {
// Name string
// Age int
// }
//
// // Consumer that logs names
// logName := func(name string) {
// fmt.Printf("Name: %s\n", name)
// }
//
// // Adapt it to consume User structs
// extractName := func(u User) string {
// return u.Name
// }
//
// logUser := consumer.Local(extractName)(logName)
// logUser(User{Name: "Alice", Age: 30}) // Logs: "Name: Alice"
//
// Example - Simplifying complex types:
//
// type DetailedConfig struct {
// Host string
// Port int
// Timeout time.Duration
// MaxRetry int
// }
//
// type SimpleConfig struct {
// Host string
// Port int
// }
//
// // Consumer that logs simple configs
// logSimple := func(c SimpleConfig) {
// fmt.Printf("Server: %s:%d\n", c.Host, c.Port)
// }
//
// // Adapt it to consume detailed configs
// simplify := func(d DetailedConfig) SimpleConfig {
// return SimpleConfig{Host: d.Host, Port: d.Port}
// }
//
// logDetailed := consumer.Local(simplify)(logSimple)
// logDetailed(DetailedConfig{
// Host: "localhost",
// Port: 8080,
// Timeout: time.Second,
// MaxRetry: 3,
// }) // Logs: "Server: localhost:8080"
//
// Example - Composing multiple transformations:
//
// type Response struct {
// StatusCode int
// Body string
// }
//
// // Consumer that logs status codes
// logStatus := func(code int) {
// fmt.Printf("Status: %d\n", code)
// }
//
// // Extract status code from response
// getStatus := func(r Response) int {
// return r.StatusCode
// }
//
// // Adapt to consume responses
// logResponse := consumer.Local(getStatus)(logStatus)
// logResponse(Response{StatusCode: 200, Body: "OK"}) // Logs: "Status: 200"
//
// Example - Using with multiple consumers:
//
// type Event struct {
// Type string
// Timestamp time.Time
// Data map[string]any
// }
//
// // Consumers for different aspects
// logType := func(t string) { fmt.Printf("Type: %s\n", t) }
// logTime := func(t time.Time) { fmt.Printf("Time: %v\n", t) }
//
// // Adapt them to consume events
// logEventType := consumer.Local(func(e Event) string { return e.Type })(logType)
// logEventTime := consumer.Local(func(e Event) time.Time { return e.Timestamp })(logTime)
//
// event := Event{Type: "UserLogin", Timestamp: time.Now(), Data: nil}
// logEventType(event) // Logs: "Type: UserLogin"
// logEventTime(event) // Logs: "Time: ..."
//
// Use Cases:
// - Type adaptation: Convert between different input types
// - Field extraction: Extract specific fields from complex structures
// - Data transformation: Preprocess data before consumption
// - Interface adaptation: Adapt consumers to work with different interfaces
// - Logging pipelines: Transform data before logging
// - Event handling: Extract relevant data from events before processing
//
// Relationship to Reader:
// Consumer is the dual of Reader in category theory:
// - Reader[R, A] = R -> A (produces output from environment)
// - Consumer[A] = A -> () (consumes input, produces side effects)
// - reader.Local transforms the environment before reading
// - consumer.Local transforms the input before consuming
// - Both are contravariant functors on their input type
func Local[R2, R1 any](f func(R2) R1) Operator[R1, R2] {
return func(c Consumer[R1]) Consumer[R2] {
return func(r2 R2) {
c(f(r2))
}
}
}
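Beyond the examples in the comment above, it may help to see that chained Local calls compose contravariantly: two nested Local applications behave like a single Local over the composed extraction functions, applied in reverse order. A minimal runnable sketch; the Order and Invoice types are illustrative, and the import path follows this file's location in the repository.

package main

import (
	"fmt"

	"github.com/IBM/fp-go/v2/consumer"
)

// Order and Invoice are illustrative types, not part of the library.
type Order struct{ Total float64 }
type Invoice struct{ Order Order }

func main() {
	logTotal := func(t float64) { fmt.Println("total:", t) }

	orderTotal := func(o Order) float64 { return o.Total }
	invoiceOrder := func(i Invoice) Order { return i.Order }

	// Two chained Local calls ...
	viaChain := consumer.Local(invoiceOrder)(consumer.Local(orderTotal)(logTotal))

	// ... behave like a single Local over the composed extractors (note the reversed order).
	viaCompose := consumer.Local(func(i Invoice) float64 { return orderTotal(invoiceOrder(i)) })(logTotal)

	viaChain(Invoice{Order: Order{Total: 12.5}})   // total: 12.5
	viaCompose(Invoice{Order: Order{Total: 12.5}}) // total: 12.5
}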

View File

@@ -0,0 +1,383 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package consumer
import (
"strconv"
"testing"
"time"
"github.com/IBM/fp-go/v2/function"
"github.com/stretchr/testify/assert"
)
func TestLocal(t *testing.T) {
t.Run("basic type transformation", func(t *testing.T) {
var captured int
consumeInt := func(x int) {
captured = x
}
// Transform string to int before consuming
stringToInt := func(s string) int {
n, _ := strconv.Atoi(s)
return n
}
consumeString := Local(stringToInt)(consumeInt)
consumeString("42")
assert.Equal(t, 42, captured)
})
t.Run("field extraction from struct", func(t *testing.T) {
type User struct {
Name string
Age int
}
var capturedName string
consumeName := func(name string) {
capturedName = name
}
extractName := func(u User) string {
return u.Name
}
consumeUser := Local(extractName)(consumeName)
consumeUser(User{Name: "Alice", Age: 30})
assert.Equal(t, "Alice", capturedName)
})
t.Run("simplifying complex types", func(t *testing.T) {
type DetailedConfig struct {
Host string
Port int
Timeout time.Duration
MaxRetry int
}
type SimpleConfig struct {
Host string
Port int
}
var captured SimpleConfig
consumeSimple := func(c SimpleConfig) {
captured = c
}
simplify := func(d DetailedConfig) SimpleConfig {
return SimpleConfig{Host: d.Host, Port: d.Port}
}
consumeDetailed := Local(simplify)(consumeSimple)
consumeDetailed(DetailedConfig{
Host: "localhost",
Port: 8080,
Timeout: time.Second,
MaxRetry: 3,
})
assert.Equal(t, SimpleConfig{Host: "localhost", Port: 8080}, captured)
})
t.Run("multiple transformations", func(t *testing.T) {
type Response struct {
StatusCode int
Body string
}
var capturedStatus int
consumeStatus := func(code int) {
capturedStatus = code
}
getStatus := func(r Response) int {
return r.StatusCode
}
consumeResponse := Local(getStatus)(consumeStatus)
consumeResponse(Response{StatusCode: 200, Body: "OK"})
assert.Equal(t, 200, capturedStatus)
})
t.Run("chaining Local transformations", func(t *testing.T) {
type Level3 struct{ Value int }
type Level2 struct{ L3 Level3 }
type Level1 struct{ L2 Level2 }
var captured int
consumeInt := func(x int) {
captured = x
}
// Chain multiple Local transformations
extract3 := func(l3 Level3) int { return l3.Value }
extract2 := func(l2 Level2) Level3 { return l2.L3 }
extract1 := func(l1 Level1) Level2 { return l1.L2 }
// Compose the transformations
consumeLevel3 := Local(extract3)(consumeInt)
consumeLevel2 := Local(extract2)(consumeLevel3)
consumeLevel1 := Local(extract1)(consumeLevel2)
consumeLevel1(Level1{L2: Level2{L3: Level3{Value: 42}}})
assert.Equal(t, 42, captured)
})
t.Run("identity transformation", func(t *testing.T) {
var captured string
consumeString := func(s string) {
captured = s
}
identity := function.Identity[string]
consumeIdentity := Local(identity)(consumeString)
consumeIdentity("test")
assert.Equal(t, "test", captured)
})
t.Run("transformation with calculation", func(t *testing.T) {
type Rectangle struct {
Width int
Height int
}
var capturedArea int
consumeArea := func(area int) {
capturedArea = area
}
calculateArea := func(r Rectangle) int {
return r.Width * r.Height
}
consumeRectangle := Local(calculateArea)(consumeArea)
consumeRectangle(Rectangle{Width: 5, Height: 10})
assert.Equal(t, 50, capturedArea)
})
t.Run("multiple consumers with same transformation", func(t *testing.T) {
type Event struct {
Type string
Timestamp time.Time
}
var capturedType string
var capturedTime time.Time
consumeType := func(t string) {
capturedType = t
}
consumeTime := func(t time.Time) {
capturedTime = t
}
extractType := func(e Event) string { return e.Type }
extractTime := func(e Event) time.Time { return e.Timestamp }
consumeEventType := Local(extractType)(consumeType)
consumeEventTime := Local(extractTime)(consumeTime)
now := time.Now()
event := Event{Type: "UserLogin", Timestamp: now}
consumeEventType(event)
consumeEventTime(event)
assert.Equal(t, "UserLogin", capturedType)
assert.Equal(t, now, capturedTime)
})
t.Run("transformation with slice", func(t *testing.T) {
var captured int
consumeLength := func(n int) {
captured = n
}
getLength := func(s []string) int {
return len(s)
}
consumeSlice := Local(getLength)(consumeLength)
consumeSlice([]string{"a", "b", "c"})
assert.Equal(t, 3, captured)
})
t.Run("transformation with map", func(t *testing.T) {
var captured int
consumeCount := func(n int) {
captured = n
}
getCount := func(m map[string]int) int {
return len(m)
}
consumeMap := Local(getCount)(consumeCount)
consumeMap(map[string]int{"a": 1, "b": 2, "c": 3})
assert.Equal(t, 3, captured)
})
t.Run("transformation with pointer", func(t *testing.T) {
var captured int
consumeInt := func(x int) {
captured = x
}
dereference := func(p *int) int {
if p == nil {
return 0
}
return *p
}
consumePointer := Local(dereference)(consumeInt)
value := 42
consumePointer(&value)
assert.Equal(t, 42, captured)
consumePointer(nil)
assert.Equal(t, 0, captured)
})
t.Run("transformation with custom type", func(t *testing.T) {
type MyType struct {
Value string
}
var captured string
consumeString := func(s string) {
captured = s
}
extractValue := func(m MyType) string {
return m.Value
}
consumeMyType := Local(extractValue)(consumeString)
consumeMyType(MyType{Value: "test"})
assert.Equal(t, "test", captured)
})
t.Run("accumulation through multiple calls", func(t *testing.T) {
var sum int
accumulate := func(x int) {
sum += x
}
double := func(x int) int {
return x * 2
}
accumulateDoubled := Local(double)(accumulate)
accumulateDoubled(1)
accumulateDoubled(2)
accumulateDoubled(3)
assert.Equal(t, 12, sum) // (1*2) + (2*2) + (3*2) = 2 + 4 + 6 = 12
})
t.Run("transformation with error handling", func(t *testing.T) {
type Result struct {
Value int
Error error
}
var captured int
consumeInt := func(x int) {
captured = x
}
extractValue := func(r Result) int {
if r.Error != nil {
return -1
}
return r.Value
}
consumeResult := Local(extractValue)(consumeInt)
consumeResult(Result{Value: 42, Error: nil})
assert.Equal(t, 42, captured)
consumeResult(Result{Value: 100, Error: assert.AnError})
assert.Equal(t, -1, captured)
})
t.Run("transformation preserves consumer behavior", func(t *testing.T) {
callCount := 0
consumer := func(x int) {
callCount++
}
transform := func(s string) int {
n, _ := strconv.Atoi(s)
return n
}
transformedConsumer := Local(transform)(consumer)
transformedConsumer("1")
transformedConsumer("2")
transformedConsumer("3")
assert.Equal(t, 3, callCount)
})
t.Run("comparison with reader.Local behavior", func(t *testing.T) {
// This test demonstrates the dual nature of Consumer and Reader
// Consumer: transforms input before consumption (contravariant)
// Reader: transforms environment before reading (also contravariant on input)
type DetailedEnv struct {
Value int
Extra string
}
type SimpleEnv struct {
Value int
}
var captured int
consumeSimple := func(e SimpleEnv) {
captured = e.Value
}
simplify := func(d DetailedEnv) SimpleEnv {
return SimpleEnv{Value: d.Value}
}
consumeDetailed := Local(simplify)(consumeSimple)
consumeDetailed(DetailedEnv{Value: 42, Extra: "ignored"})
assert.Equal(t, 42, captured)
})
}

v2/consumer/types.go
View File

@@ -0,0 +1,56 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package consumer provides types and utilities for functions that consume values without returning results.
//
// A Consumer represents a side-effecting operation that accepts a value but produces no output.
// This is useful for operations like logging, printing, updating state, or any action where
// the return value is not needed.
package consumer
type (
// Consumer represents a function that accepts a value of type A and performs a side effect.
// It does not return any value, making it useful for operations where only the side effect matters,
// such as logging, printing, or updating external state.
//
// This is a fundamental concept in functional programming for handling side effects in a
// controlled manner. Consumers can be composed, chained, or used in higher-order functions
// to build complex side-effecting behaviors.
//
// Type Parameters:
// - A: The type of value consumed by the function
//
// Example:
//
// // A simple consumer that prints values
// var printInt Consumer[int] = func(x int) {
// fmt.Println(x)
// }
// printInt(42) // Prints: 42
//
// // A consumer that logs messages
// var logger Consumer[string] = func(msg string) {
// log.Println(msg)
// }
// logger("Hello, World!") // Logs: Hello, World!
//
// // Consumers can be used in functional pipelines
// var saveToDatabase Consumer[User] = func(user User) {
// db.Save(user)
// }
Consumer[A any] = func(A)
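// Operator transforms a [Consumer] of one input type into a [Consumer] of another.
// Because Consumer is contravariant in its input, an Operator[A, B] is typically
// obtained from a plain function func(B) A (see [Local]).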
Operator[A, B any] = func(Consumer[A]) Consumer[B]
)

View File

@@ -24,8 +24,8 @@ import (
// withContext wraps an existing IOEither and performs a context check for cancellation before delegating
func WithContext[A any](ctx context.Context, ma IOResult[A]) IOResult[A] {
return func() Result[A] {
if err := context.Cause(ctx); err != nil {
return result.Left[A](err)
if ctx.Err() != nil {
return result.Left[A](context.Cause(ctx))
}
return ma()
}
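The behavioral point of the change above: the wrapped computation short-circuits only when the context is actually done, and the error it then reports is the cancellation cause. The same pattern, sketched with the standard library only; the generic withContext helper below is illustrative and not part of the package.

package main

import (
	"context"
	"errors"
	"fmt"
)

// withContext mirrors the pattern above: run the thunk only if ctx is not done,
// otherwise report the cancellation cause. (Illustrative helper, not the library API.)
func withContext[A any](ctx context.Context, thunk func() (A, error)) func() (A, error) {
	return func() (A, error) {
		if ctx.Err() != nil {
			var zero A
			return zero, context.Cause(ctx)
		}
		return thunk()
	}
}

func main() {
	cause := errors.New("shutting down")
	ctx, cancel := context.WithCancelCause(context.Background())
	cancel(cause)

	run := withContext(ctx, func() (int, error) { return 42, nil })
	_, err := run()
	fmt.Println(err) // shutting down
}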

View File

@@ -0,0 +1,16 @@
package readerio
import (
RIO "github.com/IBM/fp-go/v2/readerio"
)
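// Bracket acquires a resource, applies the use function to it and makes sure the
// release action runs afterwards; the overall result is the result of the use step.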
//go:inline
func Bracket[
A, B, ANY any](
acquire ReaderIO[A],
use Kleisli[A, B],
release func(A, B) ReaderIO[ANY],
) ReaderIO[B] {
return RIO.Bracket(acquire, use, release)
}

View File

@@ -0,0 +1,13 @@
package readerio
import "github.com/IBM/fp-go/v2/io"
//go:inline
func ChainConsumer[A any](c Consumer[A]) Operator[A, struct{}] {
return ChainIOK(io.FromConsumerK(c))
}
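// ChainFirstConsumer passes the current value to the given [Consumer] for its side effect
// and keeps the original value.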
//go:inline
func ChainFirstConsumer[A any](c Consumer[A]) Operator[A, A] {
return ChainFirstIOK(io.FromConsumerK(c))
}

View File

@@ -0,0 +1,20 @@
package readerio
import (
"context"
"github.com/IBM/fp-go/v2/reader"
RIO "github.com/IBM/fp-go/v2/readerio"
)
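// SequenceReader swaps the two layers: it turns a [ReaderIO] that produces a [Reader]
// into a [Reader] that produces a [ReaderIO].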
//go:inline
func SequenceReader[R, A any](ma ReaderIO[Reader[R, A]]) Reader[R, ReaderIO[A]] {
return RIO.SequenceReader(ma)
}
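// TraverseReader is the Kleisli counterpart of [SequenceReader]: it maps the result of a
// [ReaderIO] with a [Reader]-returning function and then swaps the two layers.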
//go:inline
func TraverseReader[R, A, B any](
f reader.Kleisli[R, A, B],
) func(ReaderIO[A]) Kleisli[R, B] {
return RIO.TraverseReader[context.Context](f)
}

View File

@@ -0,0 +1,29 @@
package readerio
import (
"context"
"log/slog"
"github.com/IBM/fp-go/v2/logging"
)
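// SLogWithCallback returns a Kleisli arrow that logs the value flowing through a pipeline
// at the given level, using the [slog.Logger] obtained from the callback, and then returns
// the value unchanged.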
func SLogWithCallback[A any](
logLevel slog.Level,
cb func(context.Context) *slog.Logger,
message string) Kleisli[A, A] {
return func(a A) ReaderIO[A] {
return func(ctx context.Context) IO[A] {
// resolve the logger from the context
logger := cb(ctx)
return func() A {
logger.LogAttrs(ctx, logLevel, message, slog.Any("value", a))
return a
}
}
}
}
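// SLog logs the value at [slog.LevelInfo] using the logger resolved from the context via
// [logging.GetLoggerFromContext] and returns the value unchanged.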
//go:inline
func SLog[A any](message string) Kleisli[A, A] {
return SLogWithCallback[A](slog.LevelInfo, logging.GetLoggerFromContext, message)
}
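A minimal sketch of how these logging steps slot into a pipeline, written as if it lived in this package (its import path is not shown in this diff); Chain and Of are the functions defined in readerio.go later in this same diff, and the context, fmt and log/slog imports are assumed.

// Illustrative in-package example (assumes context, fmt and log/slog are imported).
func ExampleSLogWithCallback() {
	logValue := SLogWithCallback[int](
		slog.LevelInfo,
		func(ctx context.Context) *slog.Logger { return slog.Default() },
		"intermediate value",
	)

	rio := Chain(logValue)(Of(42)) // log the value, then continue with it unchanged

	// run the ReaderIO: first supply the context, then execute the resulting IO
	fmt.Println(rio(context.Background())()) // prints 42 (the log line goes to the default logger)
}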

View File

@@ -0,0 +1,769 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerio
import (
"context"
"time"
"github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/reader"
RIO "github.com/IBM/fp-go/v2/readerio"
)
const (
// useParallel is the feature flag to control if we use the parallel or the sequential implementation of ap
useParallel = true
)
// MonadMap transforms the result of a [ReaderIO] using the provided function.
// The environment is passed through to the underlying computation unchanged.
//
// Parameters:
// - fa: The ReaderIO to transform
// - f: The transformation function
//
// Returns a new ReaderIO with the transformed value.
//
//go:inline
func MonadMap[A, B any](fa ReaderIO[A], f func(A) B) ReaderIO[B] {
return RIO.MonadMap(fa, f)
}
// Map transforms the result of a [ReaderIO] using the provided function.
// This is the curried version of [MonadMap], useful for composition.
//
// Parameters:
// - f: The transformation function
//
// Returns a function that transforms a ReaderIO.
//
//go:inline
func Map[A, B any](f func(A) B) Operator[A, B] {
return RIO.Map[context.Context](f)
}
// MonadMapTo replaces the result of a [ReaderIO] with a constant value.
// The environment is passed through to the underlying computation unchanged.
//
// Parameters:
// - fa: The ReaderIO to transform
// - b: The constant value to use
//
// Returns a new ReaderIO with the constant value.
//
//go:inline
func MonadMapTo[A, B any](fa ReaderIO[A], b B) ReaderIO[B] {
return RIO.MonadMapTo(fa, b)
}
// MapTo replaces the result of a [ReaderIO] with a constant value.
// This is the curried version of [MonadMapTo].
//
// Parameters:
// - b: The constant value to use
//
// Returns a function that transforms a ReaderIO.
//
//go:inline
func MapTo[A, B any](b B) Operator[A, B] {
return RIO.MapTo[context.Context, A](b)
}
// MonadChain sequences two [ReaderIO] computations, where the second depends on the result of the first.
// Both computations receive the same environment.
//
// Parameters:
// - ma: The first ReaderIO
// - f: Function that produces the second ReaderIO based on the first's result
//
// Returns a new ReaderIO representing the sequenced computation.
//
//go:inline
func MonadChain[A, B any](ma ReaderIO[A], f Kleisli[A, B]) ReaderIO[B] {
return RIO.MonadChain(ma, f)
}
// Chain sequences two [ReaderIO] computations, where the second depends on the result of the first.
// This is the curried version of [MonadChain], useful for composition.
//
// Parameters:
// - f: Function that produces the second ReaderIO based on the first's result
//
// Returns a function that sequences ReaderIO computations.
//
//go:inline
func Chain[A, B any](f Kleisli[A, B]) Operator[A, B] {
return RIO.Chain(f)
}
// MonadChainFirst sequences two [ReaderIO] computations but returns the result of the first.
// The second computation is executed for its side effects only.
//
// Parameters:
// - ma: The first ReaderIO
// - f: Function that produces the second ReaderIO
//
// Returns a ReaderIO with the result of the first computation.
//
//go:inline
func MonadChainFirst[A, B any](ma ReaderIO[A], f Kleisli[A, B]) ReaderIO[A] {
return RIO.MonadChainFirst(ma, f)
}
// MonadTap executes a side-effect computation but returns the original value.
// This is an alias for [MonadChainFirst] and is useful for operations like logging
// or validation that should not affect the main computation flow.
//
// Parameters:
// - ma: The ReaderIO to tap
// - f: Function that produces a side-effect ReaderIO
//
// Returns a ReaderIO with the original value after executing the side effect.
//
//go:inline
func MonadTap[A, B any](ma ReaderIO[A], f Kleisli[A, B]) ReaderIO[A] {
return RIO.MonadTap(ma, f)
}
// ChainFirst sequences two [ReaderIO] computations but returns the result of the first.
// This is the curried version of [MonadChainFirst].
//
// Parameters:
// - f: Function that produces the second ReaderIO
//
// Returns a function that sequences ReaderIO computations.
//
//go:inline
func ChainFirst[A, B any](f Kleisli[A, B]) Operator[A, A] {
return RIO.ChainFirst(f)
}
// Tap executes a side-effect computation but returns the original value.
// This is the curried version of [MonadTap], an alias for [ChainFirst].
//
// Parameters:
// - f: Function that produces a side-effect ReaderIO
//
// Returns a function that taps ReaderIO computations.
//
//go:inline
func Tap[A, B any](f Kleisli[A, B]) Operator[A, A] {
return RIO.Tap(f)
}
// Of creates a [ReaderIO] that ignores the environment and yields the given value.
// This represents the monadic return (pure) operation.
//
// Parameters:
// - a: The value to wrap
//
// Returns a ReaderIO that always succeeds with the given value.
//
//go:inline
func Of[A any](a A) ReaderIO[A] {
return RIO.Of[context.Context](a)
}
// MonadApPar implements parallel applicative application for [ReaderIO].
// It executes the function and value computations in parallel where possible,
// potentially improving performance for independent operations.
//
// Parameters:
// - fab: ReaderIO containing a function
// - fa: ReaderIO containing a value
//
// Returns a ReaderIO with the function applied to the value.
//
//go:inline
func MonadApPar[B, A any](fab ReaderIO[func(A) B], fa ReaderIO[A]) ReaderIO[B] {
return RIO.MonadApPar(fab, fa)
}
// MonadAp implements applicative application for [ReaderIO].
// By default, it uses parallel execution ([MonadApPar]) but can be configured to use
// sequential execution ([MonadApSeq]) via the useParallel constant.
//
// Parameters:
// - fab: ReaderIO containing a function
// - fa: ReaderIO containing a value
//
// Returns a ReaderIO with the function applied to the value.
//
//go:inline
func MonadAp[B, A any](fab ReaderIO[func(A) B], fa ReaderIO[A]) ReaderIO[B] {
// dispatch to the configured version
if useParallel {
return MonadApPar(fab, fa)
}
return MonadApSeq(fab, fa)
}
// MonadApSeq implements sequential applicative application for [ReaderIO].
// It executes the function computation first, then the value computation.
//
// Parameters:
// - fab: ReaderIO containing a function
// - fa: ReaderIO containing a value
//
// Returns a ReaderIO with the function applied to the value.
//
//go:inline
func MonadApSeq[B, A any](fab ReaderIO[func(A) B], fa ReaderIO[A]) ReaderIO[B] {
return RIO.MonadApSeq(fab, fa)
}
// Ap applies a function wrapped in a [ReaderIO] to a value wrapped in a ReaderIO.
// This is the curried version of [MonadAp], using the default execution mode.
//
// Parameters:
// - fa: ReaderIO containing a value
//
// Returns a function that applies a ReaderIO function to the value.
//
//go:inline
func Ap[B, A any](fa ReaderIO[A]) Operator[func(A) B, B] {
return RIO.Ap[B](fa)
}
// ApSeq applies a function wrapped in a [ReaderIO] to a value sequentially.
// This is the curried version of [MonadApSeq].
//
// Parameters:
// - fa: ReaderIO containing a value
//
// Returns a function that applies a ReaderIO function to the value sequentially.
//
//go:inline
func ApSeq[B, A any](fa ReaderIO[A]) Operator[func(A) B, B] {
return function.Bind2nd(MonadApSeq[B, A], fa)
}
// ApPar applies a function wrapped in a [ReaderIO] to a value in parallel.
// This is the curried version of [MonadApPar].
//
// Parameters:
// - fa: ReaderIO containing a value
//
// Returns a function that applies a ReaderIO function to the value in parallel.
//
//go:inline
func ApPar[B, A any](fa ReaderIO[A]) Operator[func(A) B, B] {
return function.Bind2nd(MonadApPar[B, A], fa)
}
// Ask returns a [ReaderIO] that provides access to the context.
// This is useful for accessing the [context.Context] within a computation.
//
// Returns a ReaderIO that produces the context.
//
//go:inline
func Ask() ReaderIO[context.Context] {
return RIO.Ask[context.Context]()
}
// FromIO converts an [IO] into a [ReaderIO].
// The resulting computation ignores the environment and simply executes the IO.
//
// Parameters:
// - t: The IO to convert
//
// Returns a ReaderIO that executes the IO for any environment.
//
//go:inline
func FromIO[A any](t IO[A]) ReaderIO[A] {
return RIO.FromIO[context.Context](t)
}
// FromReader converts a [Reader] into a [ReaderIO].
// The Reader computation is lifted into the IO context, allowing it to be
// composed with other ReaderIO operations.
//
// Parameters:
// - t: The Reader to convert
//
// Returns a ReaderIO that executes the Reader and wraps the result in IO.
//
//go:inline
func FromReader[A any](t Reader[context.Context, A]) ReaderIO[A] {
return RIO.FromReader(t)
}
// FromLazy converts a [Lazy] computation into a [ReaderIO].
// The resulting computation ignores the environment and simply evaluates the Lazy value.
// This is an alias for [FromIO] since Lazy and IO have the same structure.
//
// Parameters:
// - t: The Lazy computation to convert
//
// Returns a ReaderIO that evaluates the Lazy computation for any environment.
//
//go:inline
func FromLazy[A any](t Lazy[A]) ReaderIO[A] {
return RIO.FromIO[context.Context](t)
}
// MonadChainIOK chains a function that returns an [IO] into a [ReaderIO] computation.
// The resulting IO computation is lifted into the ReaderIO context.
//
// Parameters:
// - ma: The ReaderIO to chain from
// - f: Function that produces an IO
//
// Returns a new ReaderIO with the chained IO computation.
//
//go:inline
func MonadChainIOK[A, B any](ma ReaderIO[A], f func(A) IO[B]) ReaderIO[B] {
return RIO.MonadChainIOK(ma, f)
}
// ChainIOK chains a function that returns an [IO] into a [ReaderIO] computation.
// This is the curried version of [MonadChainIOK].
//
// Parameters:
// - f: Function that produces an IO
//
// Returns a function that chains the IO-returning function.
//
//go:inline
func ChainIOK[A, B any](f func(A) IO[B]) Operator[A, B] {
return RIO.ChainIOK[context.Context](f)
}
// MonadChainFirstIOK chains a function that returns an [IO] but keeps the original value.
// The IO computation is executed for its side effects only.
//
// Parameters:
// - ma: The ReaderIO to chain from
// - f: Function that produces an IO
//
// Returns a ReaderIO with the original value after executing the IO.
//
//go:inline
func MonadChainFirstIOK[A, B any](ma ReaderIO[A], f func(A) IO[B]) ReaderIO[A] {
return RIO.MonadChainFirstIOK(ma, f)
}
// MonadTapIOK chains a function that returns an [IO] but keeps the original value.
// This is an alias for [MonadChainFirstIOK] and is useful for side effects like logging.
//
// Parameters:
// - ma: The ReaderIO to tap
// - f: Function that produces an IO for side effects
//
// Returns a ReaderIO with the original value after executing the IO.
//
//go:inline
func MonadTapIOK[A, B any](ma ReaderIO[A], f func(A) IO[B]) ReaderIO[A] {
return RIO.MonadTapIOK(ma, f)
}
// ChainFirstIOK chains a function that returns an [IO] but keeps the original value.
// This is the curried version of [MonadChainFirstIOK].
//
// Parameters:
// - f: Function that produces an IO
//
// Returns a function that chains the IO-returning function.
//
//go:inline
func ChainFirstIOK[A, B any](f func(A) IO[B]) Operator[A, A] {
return RIO.ChainFirstIOK[context.Context](f)
}
// TapIOK chains a function that returns an [IO] but keeps the original value.
// This is the curried version of [MonadTapIOK], an alias for [ChainFirstIOK].
//
// Parameters:
// - f: Function that produces an IO for side effects
//
// Returns a function that taps with IO-returning functions.
//
//go:inline
func TapIOK[A, B any](f func(A) IO[B]) Operator[A, A] {
return RIO.TapIOK[context.Context](f)
}
// Defer creates a [ReaderIO] by lazily generating a new computation each time it's executed.
// This is useful for creating computations that should be re-evaluated on each execution.
//
// Parameters:
// - gen: Lazy generator function that produces a ReaderIO
//
// Returns a ReaderIO that generates a fresh computation on each execution.
//
//go:inline
func Defer[A any](gen Lazy[ReaderIO[A]]) ReaderIO[A] {
return RIO.Defer(gen)
}
// Memoize computes the value of the provided [ReaderIO] monad lazily but exactly once.
// The context used to compute the value is the context of the first call, so do not use this
// method if the value has a functional dependency on the content of the context.
//
// Parameters:
// - rdr: The ReaderIO to memoize
//
// Returns a ReaderIO that caches its result after the first execution.
//
//go:inline
func Memoize[A any](rdr ReaderIO[A]) ReaderIO[A] {
return RIO.Memoize(rdr)
}
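A small sketch of the "exactly once" guarantee, again written as if inside this package; FromIO is the function defined above, and the context and fmt imports are assumed.

// Illustrative in-package example: the wrapped effect runs only once, even across invocations.
func ExampleMemoize() {
	calls := 0
	compute := Memoize(FromIO[int](func() int {
		calls++
		return calls
	}))

	fmt.Println(compute(context.Background())()) // 1 – the effect runs here
	fmt.Println(compute(context.Background())()) // 1 – cached, the effect does not run again
}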
// Flatten converts a nested [ReaderIO] into a flat [ReaderIO].
// This is equivalent to [MonadChain] with the identity function.
//
// Parameters:
// - rdr: The nested ReaderIO to flatten
//
// Returns a flattened ReaderIO.
//
//go:inline
func Flatten[A any](rdr ReaderIO[ReaderIO[A]]) ReaderIO[A] {
return RIO.Flatten(rdr)
}
// MonadFlap applies a value to a function wrapped in a [ReaderIO].
// This is the reverse of [MonadAp], useful in certain composition scenarios.
//
// Parameters:
// - fab: ReaderIO containing a function
// - a: The value to apply to the function
//
// Returns a ReaderIO with the function applied to the value.
//
//go:inline
func MonadFlap[B, A any](fab ReaderIO[func(A) B], a A) ReaderIO[B] {
return RIO.MonadFlap(fab, a)
}
// Flap applies a value to a function wrapped in a [ReaderIO].
// This is the curried version of [MonadFlap].
//
// Parameters:
// - a: The value to apply to the function
//
// Returns a function that applies the value to a ReaderIO function.
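//
// Example (illustrative, mirroring the package tests):
//
//	triple := Of(func(n int) int { return n * 3 })
//	v := Flap[int](7)(triple)(context.Background())() // 21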
//
//go:inline
func Flap[B, A any](a A) Operator[func(A) B, B] {
return RIO.Flap[context.Context, B](a)
}
// MonadChainReaderK chains a [ReaderIO] with a function that returns a [Reader].
// The Reader is lifted into the ReaderIO context, allowing composition of
// Reader and ReaderIO operations.
//
// Parameters:
// - ma: The ReaderIO to chain from
// - f: Function that produces a Reader
//
// Returns a new ReaderIO with the chained Reader computation.
//
//go:inline
func MonadChainReaderK[A, B any](ma ReaderIO[A], f reader.Kleisli[context.Context, A, B]) ReaderIO[B] {
return RIO.MonadChainReaderK(ma, f)
}
// ChainReaderK chains a [ReaderIO] with a function that returns a [Reader].
// This is the curried version of [MonadChainReaderK].
//
// Parameters:
// - f: Function that produces a Reader
//
// Returns a function that chains Reader-returning functions.
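//
// Example (illustrative, mirroring the package tests):
//
//	double := ChainReaderK(func(n int) Reader[context.Context, int] {
//		return func(ctx context.Context) int { return n * 2 }
//	})
//	v := double(Of(5))(context.Background())() // 10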
//
//go:inline
func ChainReaderK[A, B any](f reader.Kleisli[context.Context, A, B]) Operator[A, B] {
return RIO.ChainReaderK(f)
}
// MonadChainFirstReaderK chains a function that returns a [Reader] but keeps the original value.
// The Reader computation is executed for its side effects only.
//
// Parameters:
// - ma: The ReaderIO to chain from
// - f: Function that produces a Reader
//
// Returns a ReaderIO with the original value after executing the Reader.
//
//go:inline
func MonadChainFirstReaderK[A, B any](ma ReaderIO[A], f reader.Kleisli[context.Context, A, B]) ReaderIO[A] {
return RIO.MonadChainFirstReaderK(ma, f)
}
// MonadTapReaderK chains a function that returns a [Reader] but keeps the original value.
// This is an alias for [MonadChainFirstReaderK] and is useful for side effects.
//
// Parameters:
// - ma: The ReaderIO to tap
// - f: Function that produces a Reader for side effects
//
// Returns a ReaderIO with the original value after executing the Reader.
//
//go:inline
func MonadTapReaderK[A, B any](ma ReaderIO[A], f reader.Kleisli[context.Context, A, B]) ReaderIO[A] {
return RIO.MonadTapReaderK(ma, f)
}
// ChainFirstReaderK chains a function that returns a [Reader] but keeps the original value.
// This is the curried version of [MonadChainFirstReaderK].
//
// Parameters:
// - f: Function that produces a Reader
//
// Returns a function that chains Reader-returning functions while preserving the original value.
//
//go:inline
func ChainFirstReaderK[A, B any](f reader.Kleisli[context.Context, A, B]) Operator[A, A] {
return RIO.ChainFirstReaderK(f)
}
// TapReaderK chains a function that returns a [Reader] but keeps the original value.
// This is the curried version of [MonadTapReaderK], an alias for [ChainFirstReaderK].
//
// Parameters:
// - f: Function that produces a Reader for side effects
//
// Returns a function that taps with Reader-returning functions.
//
//go:inline
func TapReaderK[A, B any](f reader.Kleisli[context.Context, A, B]) Operator[A, A] {
return RIO.TapReaderK(f)
}
// Read executes a [ReaderIO] with a given context, returning the resulting [IO].
// This is useful for providing the context dependency and obtaining an IO action
// that can be executed later.
//
// Parameters:
// - r: The context to provide to the ReaderIO
//
// Returns a function that converts a ReaderIO into an IO by applying the context.
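//
// Example (illustrative):
//
//	action := Read[int](context.Background())(Of(42))
//	v := action() // 42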
//
//go:inline
func Read[A any](r context.Context) func(ReaderIO[A]) IO[A] {
return RIO.Read[A](r)
}
// Local transforms the context.Context environment before passing it to a ReaderIO computation.
//
// This is the Reader's local operation, which allows you to modify the environment
// for a specific computation without affecting the outer context. The transformation
// function receives the current context and returns a new context along with a
// cancel function. The cancel function is automatically called when the computation
// completes (via defer), ensuring proper cleanup of resources.
//
// This is useful for:
// - Adding timeouts or deadlines to specific operations
// - Adding context values for nested computations
// - Creating isolated context scopes
// - Implementing context-based dependency injection
//
// Type Parameters:
// - A: The value type of the ReaderIO
//
// Parameters:
// - f: A function that transforms the context and returns a cancel function
//
// Returns:
// - An Operator that runs the computation with the transformed context
//
// Example:
//
// import F "github.com/IBM/fp-go/v2/function"
//
// // Add a custom value to the context
// type key int
// const userKey key = 0
//
// addUser := readerio.Local[string](func(ctx context.Context) (context.Context, context.CancelFunc) {
// newCtx := context.WithValue(ctx, userKey, "Alice")
// return newCtx, func() {} // No-op cancel
// })
//
// getUser := readerio.FromReader(func(ctx context.Context) string {
// if user := ctx.Value(userKey); user != nil {
// return user.(string)
// }
// return "unknown"
// })
//
// result := F.Pipe1(
// getUser,
// addUser,
// )
// user := result(context.Background())() // Returns "Alice"
//
// Timeout Example:
//
// // Add a 5-second timeout to a specific operation
// withTimeout := readerio.Local[Data](func(ctx context.Context) (context.Context, context.CancelFunc) {
// return context.WithTimeout(ctx, 5*time.Second)
// })
//
// result := F.Pipe1(
// fetchData,
// withTimeout,
// )
func Local[A any](f func(context.Context) (context.Context, context.CancelFunc)) Operator[A, A] {
return func(rr ReaderIO[A]) ReaderIO[A] {
return func(ctx context.Context) IO[A] {
return func() A {
otherCtx, otherCancel := f(ctx)
defer otherCancel()
return rr(otherCtx)()
}
}
}
}
// WithTimeout adds a timeout to the context for a ReaderIO computation.
//
// This is a convenience wrapper around Local that uses context.WithTimeout.
// The computation must complete within the specified duration, or it will be
// cancelled. This is useful for ensuring operations don't run indefinitely
// and for implementing timeout-based error handling.
//
// The timeout is relative to when the ReaderIO is executed, not when
// WithTimeout is called. The cancel function is automatically called when
// the computation completes, ensuring proper cleanup.
//
// Type Parameters:
// - A: The value type of the ReaderIO
//
// Parameters:
// - timeout: The maximum duration for the computation
//
// Returns:
// - An Operator that runs the computation with a timeout
//
// Example:
//
// import (
// "time"
// F "github.com/IBM/fp-go/v2/function"
// )
//
// // Fetch data with a 5-second timeout
// fetchData := readerio.FromReader(func(ctx context.Context) Data {
// // Simulate slow operation
// select {
// case <-time.After(10 * time.Second):
// return Data{Value: "slow"}
// case <-ctx.Done():
// return Data{}
// }
// })
//
// result := F.Pipe1(
// fetchData,
// readerio.WithTimeout[Data](5*time.Second),
// )
// data := result(context.Background())() // Returns Data{} after 5s timeout
//
// Successful Example:
//
// quickFetch := readerio.Of(Data{Value: "quick"})
// result := F.Pipe1(
// quickFetch,
// readerio.WithTimeout[Data](5*time.Second),
// )
// data := result(context.Background())() // Returns Data{Value: "quick"}
func WithTimeout[A any](timeout time.Duration) Operator[A, A] {
return Local[A](func(ctx context.Context) (context.Context, context.CancelFunc) {
return context.WithTimeout(ctx, timeout)
})
}
// WithDeadline adds an absolute deadline to the context for a ReaderIO computation.
//
// This is a convenience wrapper around Local that uses context.WithDeadline.
// The computation must complete before the specified time, or it will be
// cancelled. This is useful for coordinating operations that must finish
// by a specific time, such as request deadlines or scheduled tasks.
//
// The deadline is an absolute time, unlike WithTimeout which uses a relative
// duration. The cancel function is automatically called when the computation
// completes, ensuring proper cleanup.
//
// Type Parameters:
// - A: The value type of the ReaderIO
//
// Parameters:
// - deadline: The absolute time by which the computation must complete
//
// Returns:
// - An Operator that runs the computation with a deadline
//
// Example:
//
// import (
// "time"
// F "github.com/IBM/fp-go/v2/function"
// )
//
// // Operation must complete by 3 PM
// deadline := time.Date(2024, 1, 1, 15, 0, 0, 0, time.UTC)
//
// fetchData := readerio.FromReader(func(ctx context.Context) Data {
// // Simulate operation
// select {
// case <-time.After(1 * time.Hour):
// return Data{Value: "done"}
// case <-ctx.Done():
// return Data{}
// }
// })
//
// result := F.Pipe1(
// fetchData,
// readerio.WithDeadline[Data](deadline),
// )
// data := result(context.Background())() // Returns Data{} if past deadline
//
// Combining with Parent Context:
//
// // If parent context already has a deadline, the earlier one takes precedence
// parentCtx, cancel := context.WithDeadline(context.Background(), time.Now().Add(1*time.Hour))
// defer cancel()
//
// laterDeadline := time.Now().Add(2 * time.Hour)
// result := F.Pipe1(
// fetchData,
// readerio.WithDeadline[Data](laterDeadline),
// )
// data := result(parentCtx)() // Will use parent's 1-hour deadline
func WithDeadline[A any](deadline time.Time) Operator[A, A] {
return Local[A](func(ctx context.Context) (context.Context, context.CancelFunc) {
return context.WithDeadline(ctx, deadline)
})
}
// Delay creates an operation that passes the value through after the given delay
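//
// Example (illustrative; real code should allow for scheduling tolerance):
//
//	delayed := Delay[int](100 * time.Millisecond)(Of(42))
//	v := delayed(context.Background())() // 42, after roughly 100ms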
//
//go:inline
func Delay[A any](delay time.Duration) Operator[A, A] {
return RIO.Delay[context.Context, A](delay)
}
// After creates an operation that passes the value through once the given [time.Time] has been reached
//
//go:inline
func After[A any](timestamp time.Time) Operator[A, A] {
return RIO.After[context.Context, A](timestamp)
}

View File

@@ -0,0 +1,502 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerio
import (
"context"
"testing"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/utils"
G "github.com/IBM/fp-go/v2/io"
N "github.com/IBM/fp-go/v2/number"
"github.com/IBM/fp-go/v2/reader"
"github.com/stretchr/testify/assert"
)
func TestMonadMap(t *testing.T) {
rio := Of(5)
doubled := MonadMap(rio, N.Mul(2))
result := doubled(context.Background())()
assert.Equal(t, 10, result)
}
func TestMap(t *testing.T) {
g := F.Pipe1(
Of(1),
Map(utils.Double),
)
assert.Equal(t, 2, g(context.Background())())
}
func TestMonadMapTo(t *testing.T) {
rio := Of(42)
replaced := MonadMapTo(rio, "constant")
result := replaced(context.Background())()
assert.Equal(t, "constant", result)
}
func TestMapTo(t *testing.T) {
result := F.Pipe1(
Of(42),
MapTo[int]("constant"),
)
assert.Equal(t, "constant", result(context.Background())())
}
func TestMonadChain(t *testing.T) {
rio1 := Of(5)
result := MonadChain(rio1, func(n int) ReaderIO[int] {
return Of(n * 3)
})
assert.Equal(t, 15, result(context.Background())())
}
func TestChain(t *testing.T) {
result := F.Pipe1(
Of(5),
Chain(func(n int) ReaderIO[int] {
return Of(n * 3)
}),
)
assert.Equal(t, 15, result(context.Background())())
}
func TestMonadChainFirst(t *testing.T) {
sideEffect := 0
rio := Of(42)
result := MonadChainFirst(rio, func(n int) ReaderIO[string] {
sideEffect = n
return Of("side effect")
})
value := result(context.Background())()
assert.Equal(t, 42, value)
assert.Equal(t, 42, sideEffect)
}
func TestChainFirst(t *testing.T) {
sideEffect := 0
result := F.Pipe1(
Of(42),
ChainFirst(func(n int) ReaderIO[string] {
sideEffect = n
return Of("side effect")
}),
)
value := result(context.Background())()
assert.Equal(t, 42, value)
assert.Equal(t, 42, sideEffect)
}
func TestMonadTap(t *testing.T) {
sideEffect := 0
rio := Of(42)
result := MonadTap(rio, func(n int) ReaderIO[func()] {
sideEffect = n
return Of(func() {})
})
value := result(context.Background())()
assert.Equal(t, 42, value)
assert.Equal(t, 42, sideEffect)
}
func TestTap(t *testing.T) {
sideEffect := 0
result := F.Pipe1(
Of(42),
Tap(func(n int) ReaderIO[func()] {
sideEffect = n
return Of(func() {})
}),
)
value := result(context.Background())()
assert.Equal(t, 42, value)
assert.Equal(t, 42, sideEffect)
}
func TestOf(t *testing.T) {
rio := Of(100)
result := rio(context.Background())()
assert.Equal(t, 100, result)
}
func TestMonadAp(t *testing.T) {
fabIO := Of(N.Mul(2))
faIO := Of(5)
result := MonadAp(fabIO, faIO)
assert.Equal(t, 10, result(context.Background())())
}
func TestAp(t *testing.T) {
g := F.Pipe1(
Of(utils.Double),
Ap[int](Of(1)),
)
assert.Equal(t, 2, g(context.Background())())
}
func TestMonadApSeq(t *testing.T) {
fabIO := Of(N.Add(10))
faIO := Of(5)
result := MonadApSeq(fabIO, faIO)
assert.Equal(t, 15, result(context.Background())())
}
func TestApSeq(t *testing.T) {
g := F.Pipe1(
Of(N.Add(10)),
ApSeq[int](Of(5)),
)
assert.Equal(t, 15, g(context.Background())())
}
func TestMonadApPar(t *testing.T) {
fabIO := Of(N.Add(10))
faIO := Of(5)
result := MonadApPar(fabIO, faIO)
assert.Equal(t, 15, result(context.Background())())
}
func TestApPar(t *testing.T) {
g := F.Pipe1(
Of(N.Add(10)),
ApPar[int](Of(5)),
)
assert.Equal(t, 15, g(context.Background())())
}
func TestAsk(t *testing.T) {
rio := Ask()
ctx := context.WithValue(context.Background(), "key", "value")
result := rio(ctx)()
assert.Equal(t, ctx, result)
}
func TestFromIO(t *testing.T) {
ioAction := G.Of(42)
rio := FromIO(ioAction)
result := rio(context.Background())()
assert.Equal(t, 42, result)
}
func TestFromReader(t *testing.T) {
rdr := func(ctx context.Context) int {
return 42
}
rio := FromReader(rdr)
result := rio(context.Background())()
assert.Equal(t, 42, result)
}
func TestFromLazy(t *testing.T) {
lazy := func() int { return 42 }
rio := FromLazy(lazy)
result := rio(context.Background())()
assert.Equal(t, 42, result)
}
func TestMonadChainIOK(t *testing.T) {
rio := Of(5)
result := MonadChainIOK(rio, func(n int) G.IO[int] {
return G.Of(n * 4)
})
assert.Equal(t, 20, result(context.Background())())
}
func TestChainIOK(t *testing.T) {
result := F.Pipe1(
Of(5),
ChainIOK(func(n int) G.IO[int] {
return G.Of(n * 4)
}),
)
assert.Equal(t, 20, result(context.Background())())
}
func TestMonadChainFirstIOK(t *testing.T) {
sideEffect := 0
rio := Of(42)
result := MonadChainFirstIOK(rio, func(n int) G.IO[string] {
sideEffect = n
return G.Of("side effect")
})
value := result(context.Background())()
assert.Equal(t, 42, value)
assert.Equal(t, 42, sideEffect)
}
func TestChainFirstIOK(t *testing.T) {
sideEffect := 0
result := F.Pipe1(
Of(42),
ChainFirstIOK(func(n int) G.IO[string] {
sideEffect = n
return G.Of("side effect")
}),
)
value := result(context.Background())()
assert.Equal(t, 42, value)
assert.Equal(t, 42, sideEffect)
}
func TestMonadTapIOK(t *testing.T) {
sideEffect := 0
rio := Of(42)
result := MonadTapIOK(rio, func(n int) G.IO[func()] {
sideEffect = n
return G.Of(func() {})
})
value := result(context.Background())()
assert.Equal(t, 42, value)
assert.Equal(t, 42, sideEffect)
}
func TestTapIOK(t *testing.T) {
sideEffect := 0
result := F.Pipe1(
Of(42),
TapIOK(func(n int) G.IO[func()] {
sideEffect = n
return G.Of(func() {})
}),
)
value := result(context.Background())()
assert.Equal(t, 42, value)
assert.Equal(t, 42, sideEffect)
}
func TestDefer(t *testing.T) {
counter := 0
rio := Defer(func() ReaderIO[int] {
counter++
return Of(counter)
})
result1 := rio(context.Background())()
result2 := rio(context.Background())()
assert.Equal(t, 1, result1)
assert.Equal(t, 2, result2)
}
func TestMemoize(t *testing.T) {
counter := 0
rio := Of(0)
memoized := Memoize(MonadMap(rio, func(int) int {
counter++
return counter
}))
result1 := memoized(context.Background())()
result2 := memoized(context.Background())()
assert.Equal(t, 1, result1)
assert.Equal(t, 1, result2) // Same value, memoized
}
func TestFlatten(t *testing.T) {
nested := Of(Of(42))
flattened := Flatten(nested)
result := flattened(context.Background())()
assert.Equal(t, 42, result)
}
func TestMonadFlap(t *testing.T) {
fabIO := Of(N.Mul(3))
result := MonadFlap(fabIO, 7)
assert.Equal(t, 21, result(context.Background())())
}
func TestFlap(t *testing.T) {
result := F.Pipe1(
Of(N.Mul(3)),
Flap[int](7),
)
assert.Equal(t, 21, result(context.Background())())
}
func TestMonadChainReaderK(t *testing.T) {
rio := Of(5)
result := MonadChainReaderK(rio, func(n int) reader.Reader[context.Context, int] {
return func(ctx context.Context) int { return n * 2 }
})
assert.Equal(t, 10, result(context.Background())())
}
func TestChainReaderK(t *testing.T) {
result := F.Pipe1(
Of(5),
ChainReaderK(func(n int) reader.Reader[context.Context, int] {
return func(ctx context.Context) int { return n * 2 }
}),
)
assert.Equal(t, 10, result(context.Background())())
}
func TestMonadChainFirstReaderK(t *testing.T) {
sideEffect := 0
rio := Of(42)
result := MonadChainFirstReaderK(rio, func(n int) reader.Reader[context.Context, string] {
return func(ctx context.Context) string {
sideEffect = n
return "side effect"
}
})
value := result(context.Background())()
assert.Equal(t, 42, value)
assert.Equal(t, 42, sideEffect)
}
func TestChainFirstReaderK(t *testing.T) {
sideEffect := 0
result := F.Pipe1(
Of(42),
ChainFirstReaderK(func(n int) reader.Reader[context.Context, string] {
return func(ctx context.Context) string {
sideEffect = n
return "side effect"
}
}),
)
value := result(context.Background())()
assert.Equal(t, 42, value)
assert.Equal(t, 42, sideEffect)
}
func TestMonadTapReaderK(t *testing.T) {
sideEffect := 0
rio := Of(42)
result := MonadTapReaderK(rio, func(n int) reader.Reader[context.Context, func()] {
return func(ctx context.Context) func() {
sideEffect = n
return func() {}
}
})
value := result(context.Background())()
assert.Equal(t, 42, value)
assert.Equal(t, 42, sideEffect)
}
func TestTapReaderK(t *testing.T) {
sideEffect := 0
result := F.Pipe1(
Of(42),
TapReaderK(func(n int) reader.Reader[context.Context, func()] {
return func(ctx context.Context) func() {
sideEffect = n
return func() {}
}
}),
)
value := result(context.Background())()
assert.Equal(t, 42, value)
assert.Equal(t, 42, sideEffect)
}
func TestRead(t *testing.T) {
rio := Of(42)
ctx := context.Background()
ioAction := Read[int](ctx)(rio)
result := ioAction()
assert.Equal(t, 42, result)
}
func TestComplexPipeline(t *testing.T) {
// Test a complex pipeline combining multiple operations
result := F.Pipe3(
Ask(),
Map(func(ctx context.Context) int { return 5 }),
Chain(func(n int) ReaderIO[int] {
return Of(n * 2)
}),
Map(N.Add(10)),
)
assert.Equal(t, 20, result(context.Background())()) // (5 * 2) + 10 = 20
}
func TestFromIOWithChain(t *testing.T) {
ioAction := G.Of(10)
result := F.Pipe1(
FromIO(ioAction),
Chain(func(n int) ReaderIO[int] {
return Of(n + 5)
}),
)
assert.Equal(t, 15, result(context.Background())())
}
func TestTapWithLogging(t *testing.T) {
// Simulate logging scenario
logged := []int{}
result := F.Pipe3(
Of(42),
Tap(func(n int) ReaderIO[func()] {
logged = append(logged, n)
return Of(func() {})
}),
Map(N.Mul(2)),
Tap(func(n int) ReaderIO[func()] {
logged = append(logged, n)
return Of(func() {})
}),
)
value := result(context.Background())()
assert.Equal(t, 84, value)
assert.Equal(t, []int{42, 84}, logged)
}

View File

@@ -0,0 +1,25 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerio
import (
"github.com/IBM/fp-go/v2/readerio"
)
// TailRec creates a stack-safe, tail-recursive [Kleisli] computation from a step function.
// On each iteration the step function returns an [Either]: a Left value is fed back into
// the next iteration, while a Right value terminates the recursion with the final result.
//
//go:inline
func TailRec[A, B any](f Kleisli[A, Either[A, B]]) Kleisli[A, B] {
return readerio.TailRec(f)
}

View File

@@ -0,0 +1,41 @@
// Copyright (c) 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerio
import (
"github.com/IBM/fp-go/v2/retry"
RG "github.com/IBM/fp-go/v2/retry/generic"
)
// Retrying runs the given action repeatedly according to the retry policy.
// The action receives the current [retry.RetryStatus]; after each attempt the check
// predicate inspects the result and returns true if the action should be retried
// (as long as the policy still allows it), otherwise the last result is returned.
//
//go:inline
func Retrying[A any](
policy retry.RetryPolicy,
action Kleisli[retry.RetryStatus, A],
check func(A) bool,
) ReaderIO[A] {
// get an implementation for the types
return RG.Retrying(
Chain[A, A],
Chain[retry.RetryStatus, A],
Of[A],
Of[retry.RetryStatus],
Delay[retry.RetryStatus],
policy,
action,
check,
)
}

View File

@@ -0,0 +1,75 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerio
import (
"context"
"github.com/IBM/fp-go/v2/consumer"
"github.com/IBM/fp-go/v2/either"
"github.com/IBM/fp-go/v2/io"
"github.com/IBM/fp-go/v2/lazy"
"github.com/IBM/fp-go/v2/reader"
"github.com/IBM/fp-go/v2/readerio"
)
type (
// Lazy represents a deferred computation that produces a value of type A when executed.
// The computation is not executed until explicitly invoked.
Lazy[A any] = lazy.Lazy[A]
// IO represents a side-effectful computation that produces a value of type A.
// The computation is deferred and only executed when invoked.
//
// IO[A] is equivalent to func() A
IO[A any] = io.IO[A]
// Reader represents a computation that depends on a context of type R.
// This is used for dependency injection and accessing shared context.
//
// Reader[R, A] is equivalent to func(R) A
Reader[R, A any] = reader.Reader[R, A]
// ReaderIO represents a context-dependent computation that performs side effects.
// This is specialized to use [context.Context] as the context type.
//
// ReaderIO[A] is equivalent to func(context.Context) func() A
ReaderIO[A any] = readerio.ReaderIO[context.Context, A]
// Kleisli represents a Kleisli arrow for the ReaderIO monad.
// It is a function that takes a value of type A and returns a ReaderIO computation
// that produces a value of type B.
//
// Kleisli arrows are used for composing monadic computations and are fundamental
// to functional programming patterns involving effects and context.
//
// Kleisli[A, B] is equivalent to func(A) func(context.Context) func() B
Kleisli[A, B any] = reader.Reader[A, ReaderIO[B]]
// Operator represents a transformation from one ReaderIO computation to another.
// It takes a ReaderIO[A] and returns a ReaderIO[B], allowing for the composition
// of context-dependent, side-effectful computations.
//
// Operators are useful for building pipelines of ReaderIO computations where
// each step can depend on the previous computation's result.
//
// Operator[A, B] is equivalent to func(ReaderIO[A]) func(context.Context) func() B
Operator[A, B any] = Kleisli[ReaderIO[A], B]
	// Consumer represents a function that consumes a value of type A, typically for side effects.
	Consumer[A any] = consumer.Consumer[A]
	// Either represents a value that is either a failure of type E or a success of type A.
	Either[E, A any] = either.Either[E, A]
)

View File

@@ -0,0 +1,682 @@
# Sequence Functions and Point-Free Style Programming
This document explains how the `Sequence*` functions in the `context/readerioresult` package enable point-free style programming and improve code composition.
## Table of Contents
1. [What is Point-Free Style?](#what-is-point-free-style)
2. [The Problem: Nested Function Application](#the-problem-nested-function-application)
3. [The Solution: Sequence Functions](#the-solution-sequence-functions)
4. [How Sequence Enables Point-Free Style](#how-sequence-enables-point-free-style)
5. [TraverseReader: Introducing Dependencies](#traversereader-introducing-dependencies)
6. [Practical Benefits](#practical-benefits)
7. [Examples](#examples)
8. [Comparison: With and Without Sequence](#comparison-with-and-without-sequence)
## What is Point-Free Style?
Point-free style (also called tacit programming) is a programming paradigm where function definitions don't explicitly mention their arguments. Instead, functions are composed using combinators and higher-order functions.
**Traditional style (with points):**
```go
func double(x int) int {
return x * 2
}
```
**Point-free style (without points):**
```go
var double = N.Mul(2)
```
The key benefit is that point-free style emphasizes **what** the function does (its transformation) rather than **how** it manipulates data.
## The Problem: Nested Function Application
In functional programming with monadic types like `ReaderIOResult`, we often have nested structures where we need to apply parameters in a specific order. Consider:
```go
type ReaderIOResult[A any] = func(context.Context) func() Either[error, A]
type Reader[R, A any] = func(R) A
// A computation that produces a Reader
type Computation = ReaderIOResult[Reader[Config, int]]
// Expands to: func(context.Context) func() Either[error, func(Config) int]
```
To use this, we must apply parameters in this order:
1. First, provide `context.Context`
2. Then, execute the IO effect (call the function)
3. Then, unwrap the `Either` to get the `Reader`
4. Finally, provide the `Config`
This creates several problems:
### Problem 1: Awkward Parameter Order
```go
computation := getComputation()
ctx := context.Background()
cfg := Config{Value: 42}
// Must apply in this specific order
result := computation(ctx)() // Get Either[error, Reader[Config, int]]
if reader, err := either.Unwrap(result); err == nil {
value := reader(cfg) // Finally apply Config
// use value
}
```
The `Config` parameter, which is often known early and stable, must be provided last. This prevents partial application and reuse.
### Problem 2: Cannot Partially Apply Dependencies
```go
// Want to do this: create a reusable computation with Config baked in
// But can't because Config comes last!
withConfig := computation(cfg) // ❌ Doesn't work - cfg comes last, not first
```
### Problem 3: Breaks Point-Free Composition
```go
// Want to compose like this:
var pipeline = F.Flow3(
getComputation,
applyConfig(cfg), // ❌ Can't do this - Config comes last
processResult,
)
```
## The Solution: Sequence Functions
The `Sequence*` functions solve this by "flipping" or "sequencing" the nested structure, changing the order in which parameters are applied.
### SequenceReader
```go
func SequenceReader[R, A any](
ma ReaderIOResult[Reader[R, A]]
) Kleisli[R, A]
```
**Type transformation:**
```
From: func(context.Context) func() Either[error, func(R) A]
To: func(R) func(context.Context) func() Either[error, A]
```
Now `R` (the Reader's environment) comes **first**, before `context.Context`!
### SequenceReaderIO
```go
func SequenceReaderIO[R, A any](
ma ReaderIOResult[ReaderIO[R, A]]
) Kleisli[R, A]
```
**Type transformation:**
```
From: func(context.Context) func() Either[error, func(R) func() A]
To: func(R) func(context.Context) func() Either[error, A]
```
### SequenceReaderResult
```go
func SequenceReaderResult[R, A any](
ma ReaderIOResult[ReaderResult[R, A]]
) Kleisli[R, A]
```
**Type transformation:**
```
From: func(context.Context) func() Either[error, func(R) Either[error, A]]
To: func(R) func(context.Context) func() Either[error, A]
```
## How Sequence Enables Point-Free Style
### 1. Partial Application
By moving the environment parameter first, we can partially apply it:
```go
type Config struct { Multiplier int }
computation := getComputation() // ReaderIOResult[Reader[Config, int]]
sequenced := SequenceReader[Config, int](computation)
// Partially apply Config
cfg := Config{Multiplier: 5}
withConfig := sequenced(cfg) // ✅ Now we have ReaderIOResult[int]
// Reuse with different contexts
result1 := withConfig(ctx1)()
result2 := withConfig(ctx2)()
```
### 2. Dependency Injection
Inject dependencies early in the pipeline:
```go
type Database struct { ConnectionString string }
makeQuery := func(ctx context.Context) func() Either[error, func(Database) string] {
// ... implementation
}
// Sequence to enable DI
queryWithDB := SequenceReader[Database, string](makeQuery)
// Inject database
db := Database{ConnectionString: "localhost:5432"}
query := queryWithDB(db) // ✅ Database injected
// Use query with any context
result := query(context.Background())()
```
### 3. Point-Free Composition
Build pipelines without mentioning intermediate values:
```go
var pipeline = F.Pipe2(
    getComputation(),            // ReaderIOResult[Reader[Config, int]]
    SequenceReader[Config, int], // func(Config) ReaderIOResult[int]
    applyConfig(cfg),            // ReaderIOResult[int]
)
// Or with partial application:
var withConfig = F.Pipe1(
getComputation(),
SequenceReader[Config, int],
)
result := withConfig(cfg)(ctx)()
```
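
The `applyConfig` / `applyToDatabase` / `withDatabase` helpers used throughout these pipelines are not library functions; they are small adapters that feed a concrete environment into a `Kleisli`. A minimal, hypothetical sketch (assuming the `Kleisli[R, A] = func(R) ReaderIOResult[A]` shape described above) could look like this:

```go
// applyConfig feeds a concrete environment value into a Kleisli,
// turning func(R) ReaderIOResult[A] into a plain ReaderIOResult[A].
// This is a hypothetical helper used only for the examples in this document.
func applyConfig[R, A any](r R) func(Kleisli[R, A]) ReaderIOResult[A] {
	return func(k Kleisli[R, A]) ReaderIOResult[A] {
		return k(r)
	}
}
```

With such an adapter, `applyConfig(cfg)` slots directly into a `Pipe` chain right after a `Sequence*` or `TraverseReader` step.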
### 4. Reusable Computations
Create specialized versions of generic computations:
```go
// Generic computation
makeServiceInfo := func(ctx context.Context) func() Either[error, func(ServiceConfig) string] {
// ... implementation
}
sequenced := SequenceReader[ServiceConfig, string](makeServiceInfo)
// Create specialized versions
authService := sequenced(ServiceConfig{Name: "Auth", Version: "1.0"})
userService := sequenced(ServiceConfig{Name: "User", Version: "2.0"})
// Reuse across contexts
authInfo := authService(ctx)()
userInfo := userService(ctx)()
```
## TraverseReader: Introducing Dependencies
While `SequenceReader` flips the parameter order of an existing nested structure, `TraverseReader` allows you to **introduce** a new Reader dependency into an existing computation.
### Function Signature
```go
func TraverseReader[R, A, B any](
f reader.Kleisli[R, A, B],
) func(ReaderIOResult[A]) Kleisli[R, B]
```
**Type transformation:**
```
Input: ReaderIOResult[A] = func(context.Context) func() Either[error, A]
With: reader.Kleisli[R, A, B] = func(A) func(R) B
Output: Kleisli[R, B] = func(R) func(context.Context) func() Either[error, B]
```
### What It Does
`TraverseReader` takes:
1. A Reader-based transformation `f: func(A) func(R) B` that depends on environment `R`
2. Returns a function that transforms `ReaderIOResult[A]` into `Kleisli[R, B]`
This allows you to:
- Add environment dependencies to computations that don't have them yet
- Transform values within a ReaderIOResult using environment-dependent logic
- Build composable pipelines where transformations depend on configuration
### Key Difference from SequenceReader
- **SequenceReader**: Works with computations that **already contain** a Reader (`ReaderIOResult[Reader[R, A]]`)
- Flips the order so `R` comes first
- No transformation of the value itself
- **TraverseReader**: Works with computations that **don't have** a Reader yet (`ReaderIOResult[A]`)
- Introduces a new Reader dependency via a transformation function
- Transforms `A` to `B` using environment `R`
### Example: Adding Configuration to a Computation
```go
type Config struct {
Multiplier int
Prefix string
}
// Original computation that just produces an int
getValue := func(ctx context.Context) func() Either[error, int] {
return func() Either[error, int] {
return Right[error](10)
}
}
// A Reader-based transformation that depends on Config
formatWithConfig := func(n int) func(Config) string {
return func(cfg Config) string {
result := n * cfg.Multiplier
return fmt.Sprintf("%s: %d", cfg.Prefix, result)
}
}
// Use TraverseReader to introduce Config dependency
traversed := TraverseReader[Config, int, string](formatWithConfig)
withConfig := traversed(getValue)
// Now we can provide Config to get the final result
cfg := Config{Multiplier: 5, Prefix: "Result"}
ctx := context.Background()
result := withConfig(cfg)(ctx)() // Returns Right("Result: 50")
```
### Point-Free Composition with TraverseReader
```go
// Build a pipeline that introduces dependencies at each stage
var pipeline = F.Pipe3(
loadValue, // ReaderIOResult[int]
TraverseReader(multiplyByConfig), // Kleisli[Config, int]
applyConfig(cfg), // ReaderIOResult[int]
Chain(TraverseReader(formatWithStyle)), // Introduce another dependency
)
```
### When to Use TraverseReader vs SequenceReader
**Use SequenceReader when:**
- Your computation already returns a Reader: `ReaderIOResult[Reader[R, A]]`
- You just want to flip the parameter order
- No transformation of the value is needed
```go
// Already have Reader[Config, int]
computation := getComputation() // ReaderIOResult[Reader[Config, int]]
sequenced := SequenceReader[Config, int](computation)
result := sequenced(cfg)(ctx)()
```
**Use TraverseReader when:**
- Your computation doesn't have a Reader yet: `ReaderIOResult[A]`
- You want to transform the value using environment-dependent logic
- You're introducing a new dependency into the pipeline
```go
// Have ReaderIOResult[int], want to add Config dependency
computation := getValue() // ReaderIOResult[int]
traversed := TraverseReader[Config, int, string](formatWithConfig)
withDep := traversed(computation)
result := withDep(cfg)(ctx)()
```
### Practical Example: Multi-Stage Processing
```go
type DatabaseConfig struct {
ConnectionString string
Timeout time.Duration
}
type FormattingConfig struct {
DateFormat string
Timezone string
}
// Stage 1: Load raw data (no dependencies yet)
loadData := func(ctx context.Context) func() Either[error, RawData] {
// ... implementation
}
// Stage 2: Process with database config
processWithDB := func(raw RawData) func(DatabaseConfig) ProcessedData {
return func(cfg DatabaseConfig) ProcessedData {
// Use cfg.ConnectionString, cfg.Timeout
return ProcessedData{/* ... */}
}
}
// Stage 3: Format with formatting config
formatData := func(processed ProcessedData) func(FormattingConfig) string {
return func(cfg FormattingConfig) string {
// Use cfg.DateFormat, cfg.Timezone
return "formatted result"
}
}
// Build pipeline introducing dependencies at each stage
var pipeline = F.Pipe3(
loadData,
TraverseReader[DatabaseConfig, RawData, ProcessedData](processWithDB),
// Now we have Kleisli[DatabaseConfig, ProcessedData]
applyConfig(dbConfig),
// Now we have ReaderIOResult[ProcessedData]
TraverseReader[FormattingConfig, ProcessedData, string](formatData),
// Now we have Kleisli[FormattingConfig, string]
)
// Execute with both configs
result := pipeline(fmtConfig)(ctx)()
```
### Combining TraverseReader and SequenceReader
You can combine both functions in complex pipelines:
```go
// Start with nested Reader
computation := getComputation() // ReaderIOResult[Reader[Config, User]]
var pipeline = F.Pipe3(
computation,
SequenceReader[Config, User], // Flip to get Kleisli[Config, User]
applyConfig(cfg), // Apply config, get ReaderIOResult[User]
TraverseReader(enrichWithDatabase), // Add database dependency
// Now have Kleisli[Database, EnrichedUser]
)
result := pipeline(db)(ctx)()
```
## Practical Benefits
### 1. **Improved Testability**
Inject test dependencies easily:
```go
// Production
prodDB := Database{ConnectionString: "prod:5432"}
prodQuery := queryWithDB(prodDB)
// Testing
testDB := Database{ConnectionString: "test:5432"}
testQuery := queryWithDB(testDB)
// Same computation, different dependencies
```
### 2. **Better Separation of Concerns**
Separate configuration from execution:
```go
// Configuration phase (pure, no effects)
cfg := loadConfig()
computation := sequenced(cfg)
// Execution phase (with effects)
result := computation(ctx)()
```
### 3. **Enhanced Composability**
Build complex pipelines from simple pieces:
```go
var processUser = F.Pipe3(
loadUserConfig, // ReaderIOResult[Reader[Database, User]]
SequenceReader, // func(Database) ReaderIOResult[User]
applyDatabase(db), // ReaderIOResult[User]
Chain(validateUser), // ReaderIOResult[ValidatedUser]
)
```
### 4. **Reduced Boilerplate**
No need to manually thread parameters:
```go
// Without Sequence - manual threading
func processWithConfig(cfg Config) ReaderIOResult[Result] {
return func(ctx context.Context) func() Either[error, Result] {
return func() Either[error, Result] {
comp := getComputation()(ctx)()
if reader, err := either.Unwrap(comp); err == nil {
value := reader(cfg)
// ... more processing
}
// ... error handling
}
}
}
// With Sequence - point-free
var processWithConfig = F.Pipe1(
    getComputation(),
SequenceReader[Config, Result],
)
```
## Examples
### Example 1: Database Query with Configuration
```go
type QueryConfig struct {
Timeout time.Duration
MaxRows int
}
type Database struct {
ConnectionString string
}
// Without Sequence
func executeQueryOld(cfg QueryConfig, db Database) ReaderIOResult[[]Row] {
return func(ctx context.Context) func() Either[error, []Row] {
return func() Either[error, []Row] {
// Must manually handle all parameters
// ...
}
}
}
// With Sequence
func makeQuery(ctx context.Context) func() Either[error, func(Database) []Row] {
return func() Either[error, func(Database) []Row] {
return Right[error](func(db Database) []Row {
// Implementation
return []Row{}
})
}
}
var executeQuery = F.Pipe1(
makeQuery,
SequenceReader[Database, []Row],
)
// Usage
db := Database{ConnectionString: "localhost:5432"}
query := executeQuery(db)
result := query(ctx)()
```
### Example 2: Multi-Service Architecture
```go
type ServiceRegistry struct {
AuthService AuthService
UserService UserService
EmailService EmailService
}
// Create computations that depend on services
makeAuthCheck := func(ctx context.Context) func() Either[error, func(ServiceRegistry) bool] {
// ... implementation
}
makeSendEmail := func(ctx context.Context) func() Either[error, func(ServiceRegistry) error] {
// ... implementation
}
// Sequence them
authCheck := SequenceReader[ServiceRegistry, bool](makeAuthCheck)
sendEmail := SequenceReader[ServiceRegistry, error](makeSendEmail)
// Inject services once
registry := ServiceRegistry{ /* ... */ }
checkAuth := authCheck(registry)
sendMail := sendEmail(registry)
// Use with different contexts
if isAuth, _ := either.Unwrap(checkAuth(ctx1)()); isAuth {
sendMail(ctx2)()
}
```
### Example 3: Configuration-Driven Pipeline
```go
type PipelineConfig struct {
Stage1Config Stage1Config
Stage2Config Stage2Config
Stage3Config Stage3Config
}
// Define stages
stage1 := SequenceReader[Stage1Config, IntermediateResult1](makeStage1)
stage2 := SequenceReader[Stage2Config, IntermediateResult2](makeStage2)
stage3 := SequenceReader[Stage3Config, FinalResult](makeStage3)
// Build pipeline with configuration
func buildPipeline(cfg PipelineConfig) ReaderIOResult[FinalResult] {
	return F.Pipe2(
stage1(cfg.Stage1Config),
Chain(func(r1 IntermediateResult1) ReaderIOResult[IntermediateResult2] {
return stage2(cfg.Stage2Config)
}),
Chain(func(r2 IntermediateResult2) ReaderIOResult[FinalResult] {
return stage3(cfg.Stage3Config)
}),
)
}
// Execute pipeline
cfg := loadPipelineConfig()
pipeline := buildPipeline(cfg)
result := pipeline(ctx)()
```
## Comparison: With and Without Sequence
### Without Sequence (Imperative Style)
```go
func processUser(userID string) ReaderIOResult[ProcessedUser] {
return func(ctx context.Context) func() Either[error, ProcessedUser] {
return func() Either[error, ProcessedUser] {
// Get database
			dbComp := getDatabase()(ctx)()
			dbReader, err := either.Unwrap(dbComp)
			if err != nil {
				return Left[ProcessedUser](err)
			}
			db := dbReader(dbConfig)
			// Get user
			userComp := getUser(userID)(ctx)()
			userReader, err := either.Unwrap(userComp)
			if err != nil {
				return Left[ProcessedUser](err)
			}
			user := userReader(db)
			// Process user
			processComp := processUserData(user)(ctx)()
			processReader, err := either.Unwrap(processComp)
			if err != nil {
				return Left[ProcessedUser](err)
			}
			result := processReader(processingConfig)
return Right[error](result)
}
}
}
```
### With Sequence (Point-Free Style)
```go
var processUser = func(userID string) ReaderIOResult[ProcessedUser] {
	return F.Pipe4(
		getDatabase(),
SequenceReader[DatabaseConfig, Database],
applyConfig(dbConfig),
Chain(func(db Database) ReaderIOResult[User] {
return F.Pipe2(
getUser(userID),
SequenceReader[Database, User],
applyDB(db),
)
}),
Chain(func(user User) ReaderIOResult[ProcessedUser] {
return F.Pipe2(
processUserData(user),
SequenceReader[ProcessingConfig, ProcessedUser],
applyConfig(processingConfig),
)
}),
)
}
```
## Key Takeaways
1. **Sequence functions flip parameter order** to enable partial application
2. **Dependencies come first**, making them easy to inject and test
3. **Point-free style** becomes natural and readable
4. **Composition** is enhanced through proper parameter ordering
5. **Reusability** increases as computations can be specialized early
6. **Testability** improves through easy dependency injection
7. **Separation of concerns** is clearer (configuration vs. execution)
## When to Use Sequence
Use `Sequence*` functions when:
- ✅ You want to partially apply environment/configuration parameters
- ✅ You're building reusable computations with injected dependencies
- ✅ You need to test with different dependency implementations
- ✅ You're composing complex pipelines in point-free style
- ✅ You want to separate configuration from execution
- ✅ You're working with nested Reader-like structures
Don't use `Sequence*` when:
- ❌ The original parameter order is already optimal
- ❌ You're not doing any composition or partial application
- ❌ The added abstraction doesn't provide value
- ❌ The code is simpler without it
## Conclusion
The `Sequence*` functions are powerful tools for enabling point-free style programming in Go. By flipping the parameter order of nested monadic structures, they make it easy to:
- Partially apply dependencies
- Build composable pipelines
- Improve testability
- Write more declarative code
While they add a layer of abstraction, the benefits in terms of code reusability, testability, and composability make them invaluable for functional programming in Go.

View File

@@ -18,14 +18,13 @@ package readerioresult
import (
"context"
"github.com/IBM/fp-go/v2/context/readerio"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/apply"
"github.com/IBM/fp-go/v2/io"
"github.com/IBM/fp-go/v2/ioeither"
"github.com/IBM/fp-go/v2/ioresult"
L "github.com/IBM/fp-go/v2/optics/lens"
"github.com/IBM/fp-go/v2/reader"
"github.com/IBM/fp-go/v2/readerio"
RIOR "github.com/IBM/fp-go/v2/readerioresult"
"github.com/IBM/fp-go/v2/result"
)
@@ -96,7 +95,7 @@ func Bind[S1, S2, T any](
setter func(T) func(S1) S2,
f Kleisli[S1, T],
) Operator[S1, S2] {
return RIOR.Bind(setter, f)
return RIOR.Bind(setter, WithContextK(f))
}
// Let attaches the result of a computation to a context [S1] to produce a context [S2]
@@ -128,6 +127,13 @@ func BindTo[S1, T any](
return RIOR.BindTo[context.Context](setter)
}
//go:inline
func BindToP[S1, T any](
setter Prism[S1, T],
) Operator[T, S1] {
return BindTo(setter.ReverseGet)
}
// ApS attaches a value to a context [S1] to produce a context [S2] by considering
// the context and the value concurrently (using Applicative rather than Monad).
// This allows independent computations to be combined without one depending on the result of the other.
@@ -214,7 +220,7 @@ func ApS[S1, S2, T any](
//
//go:inline
func ApSL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
fa ReaderIOResult[T],
) Operator[S, S] {
return ApS(lens.Set, fa)
@@ -253,10 +259,10 @@ func ApSL[S, T any](
//
//go:inline
func BindL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
f Kleisli[T, T],
) Operator[S, S] {
return RIOR.BindL(lens, f)
return RIOR.BindL(lens, WithContextK(f))
}
// LetL is a variant of Let that uses a lens to focus on a specific part of the context.
@@ -289,8 +295,8 @@ func BindL[S, T any](
//
//go:inline
func LetL[S, T any](
lens L.Lens[S, T],
f func(T) T,
lens Lens[S, T],
f Endomorphism[T],
) Operator[S, S] {
return RIOR.LetL[context.Context](lens, f)
}
@@ -322,7 +328,7 @@ func LetL[S, T any](
//
//go:inline
func LetToL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
b T,
) Operator[S, S] {
return RIOR.LetToL[context.Context](lens, b)
@@ -398,7 +404,7 @@ func BindReaderK[S1, S2, T any](
//go:inline
func BindReaderIOK[S1, S2, T any](
setter func(T) func(S1) S2,
f readerio.Kleisli[context.Context, S1, T],
f readerio.Kleisli[S1, T],
) Operator[S1, S2] {
return Bind(setter, F.Flow2(f, FromReaderIO[T]))
}
@@ -443,7 +449,7 @@ func BindResultK[S1, S2, T any](
//
//go:inline
func BindIOEitherKL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
f ioresult.Kleisli[T, T],
) Operator[S, S] {
return BindL(lens, F.Flow2(f, FromIOEither[T]))
@@ -458,7 +464,7 @@ func BindIOEitherKL[S, T any](
//
//go:inline
func BindIOResultKL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
f ioresult.Kleisli[T, T],
) Operator[S, S] {
return BindL(lens, F.Flow2(f, FromIOEither[T]))
@@ -474,7 +480,7 @@ func BindIOResultKL[S, T any](
//
//go:inline
func BindIOKL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
f io.Kleisli[T, T],
) Operator[S, S] {
return BindL(lens, F.Flow2(f, FromIO[T]))
@@ -490,7 +496,7 @@ func BindIOKL[S, T any](
//
//go:inline
func BindReaderKL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
f reader.Kleisli[context.Context, T, T],
) Operator[S, S] {
return BindL(lens, F.Flow2(f, FromReader[T]))
@@ -506,8 +512,8 @@ func BindReaderKL[S, T any](
//
//go:inline
func BindReaderIOKL[S, T any](
lens L.Lens[S, T],
f readerio.Kleisli[context.Context, T, T],
lens Lens[S, T],
f readerio.Kleisli[T, T],
) Operator[S, S] {
return BindL(lens, F.Flow2(f, FromReaderIO[T]))
}
@@ -627,7 +633,7 @@ func ApResultS[S1, S2, T any](
//
//go:inline
func ApIOEitherSL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
fa IOResult[T],
) Operator[S, S] {
return F.Bind2nd(F.Flow2[ReaderIOResult[S], ioresult.Operator[S, S]], ioresult.ApSL(lens, fa))
@@ -642,7 +648,7 @@ func ApIOEitherSL[S, T any](
//
//go:inline
func ApIOResultSL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
fa IOResult[T],
) Operator[S, S] {
return F.Bind2nd(F.Flow2[ReaderIOResult[S], ioresult.Operator[S, S]], ioresult.ApSL(lens, fa))
@@ -657,7 +663,7 @@ func ApIOResultSL[S, T any](
//
//go:inline
func ApIOSL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
fa IO[T],
) Operator[S, S] {
return ApSL(lens, FromIO(fa))
@@ -672,7 +678,7 @@ func ApIOSL[S, T any](
//
//go:inline
func ApReaderSL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
fa Reader[context.Context, T],
) Operator[S, S] {
return ApSL(lens, FromReader(fa))
@@ -687,7 +693,7 @@ func ApReaderSL[S, T any](
//
//go:inline
func ApReaderIOSL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
fa ReaderIO[T],
) Operator[S, S] {
return ApSL(lens, FromReaderIO(fa))
@@ -702,7 +708,7 @@ func ApReaderIOSL[S, T any](
//
//go:inline
func ApEitherSL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
fa Result[T],
) Operator[S, S] {
return ApSL(lens, FromEither(fa))
@@ -717,7 +723,7 @@ func ApEitherSL[S, T any](
//
//go:inline
func ApResultSL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
fa Result[T],
) Operator[S, S] {
return ApSL(lens, FromResult(fa))

View File

@@ -203,9 +203,7 @@ func TestApS_EmptyState(t *testing.T) {
result := res(t.Context())()
assert.True(t, E.IsRight(result))
emptyOpt := E.ToOption(result)
assert.True(t, O.IsSome(emptyOpt))
empty, _ := O.Unwrap(emptyOpt)
assert.Equal(t, Empty{}, empty)
assert.Equal(t, O.Of(Empty{}), emptyOpt)
}
func TestApS_ChainedWithBind(t *testing.T) {

View File

@@ -16,11 +16,14 @@
package readerioresult
import (
F "github.com/IBM/fp-go/v2/function"
RIOR "github.com/IBM/fp-go/v2/readerioresult"
)
// Bracket makes sure that a resource is cleaned up in the event of an error. The release action is called regardless of
// whether the body action returns and error or not.
//
//go:inline
func Bracket[
A, B, ANY any](
@@ -28,5 +31,5 @@ func Bracket[
use Kleisli[A, B],
release func(A, Either[B]) ReaderIOResult[ANY],
) ReaderIOResult[B] {
return RIOR.Bracket(acquire, use, release)
return RIOR.Bracket(acquire, F.Flow2(use, WithContext), release)
}

View File

@@ -19,6 +19,7 @@ import (
"context"
CIOE "github.com/IBM/fp-go/v2/context/ioresult"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/ioeither"
)
@@ -34,9 +35,17 @@ import (
// Returns a ReaderIOResult that checks for cancellation before executing.
func WithContext[A any](ma ReaderIOResult[A]) ReaderIOResult[A] {
return func(ctx context.Context) IOEither[A] {
if err := context.Cause(ctx); err != nil {
return ioeither.Left[A](err)
if ctx.Err() != nil {
return ioeither.Left[A](context.Cause(ctx))
}
return CIOE.WithContext(ctx, ma(ctx))
}
}
//go:inline
func WithContextK[A, B any](f Kleisli[A, B]) Kleisli[A, B] {
return F.Flow2(
f,
WithContext,
)
}

View File

@@ -0,0 +1,13 @@
package readerioresult
import "github.com/IBM/fp-go/v2/io"
// ChainConsumer passes the value of the computation to the given [Consumer] as a
// side effect and continues with the empty struct.
//
//go:inline
func ChainConsumer[A any](c Consumer[A]) Operator[A, struct{}] {
return ChainIOK(io.FromConsumerK(c))
}
// ChainFirstConsumer passes the value of the computation to the given [Consumer]
// as a side effect and keeps the original value.
//
//go:inline
func ChainFirstConsumer[A any](c Consumer[A]) Operator[A, A] {
return ChainFirstIOK(io.FromConsumerK(c))
}

View File

@@ -0,0 +1,295 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerioresult
import (
"context"
"github.com/IBM/fp-go/v2/reader"
RIO "github.com/IBM/fp-go/v2/readerio"
RIOR "github.com/IBM/fp-go/v2/readerioresult"
RR "github.com/IBM/fp-go/v2/readerresult"
)
// SequenceReader transforms a ReaderIOResult containing a Reader into a function that
// takes the Reader's environment first, then returns a ReaderIOResult.
//
// This function "flips" or "sequences" the nested structure, changing the order in which
// parameters are applied. It's particularly useful for point-free style programming where
// you want to partially apply the inner Reader's environment before dealing with the
// outer context.
//
// Type transformation:
//
// From: ReaderIOResult[Reader[R, A]]
// = func(context.Context) func() Either[error, func(R) A]
//
// To:   Kleisli[R, A]
//     = func(R) func(context.Context) func() Either[error, A]
//
// This allows you to:
//  1. Provide the Reader's environment R first
//  2. Then provide the context.Context
//  3. Finally execute the IO effect to get Either[error, A]
//
// Point-free style benefits:
// - Enables partial application of the Reader environment
// - Facilitates composition of Reader-based computations
// - Allows building reusable computation pipelines
// - Supports dependency injection patterns where R represents dependencies
//
// Example:
//
// type Config struct {
// Timeout int
// }
//
// // A computation that produces a Reader based on context
// func getMultiplier(ctx context.Context) func() Either[error, func(Config) int] {
// return func() Either[error, func(Config) int] {
// return Right[error](func(cfg Config) int {
// return cfg.Timeout * 2
// })
// }
// }
//
// // Sequence it to apply Config first
// sequenced := SequenceReader[Config, int](getMultiplier)
//
// // Now we can partially apply the Config
// cfg := Config{Timeout: 30}
// ctx := context.Background()
//	result := sequenced(cfg)(ctx)() // Returns Right(60)
//
// This is especially useful in point-free style when building computation pipelines:
//
//	var pipeline = F.Pipe2(
//	    loadConfig,           // ReaderIOResult[Reader[Database, Config]]
//	    SequenceReader,       // Kleisli[Database, Config]
//	    applyToDatabase(db),  // ReaderIOResult[Config]
// )
//
//go:inline
func SequenceReader[R, A any](ma ReaderIOResult[Reader[R, A]]) Kleisli[R, A] {
return RIOR.SequenceReader(ma)
}
// SequenceReaderIO transforms a ReaderIOResult containing a ReaderIO into a function that
// takes the ReaderIO's environment first, then returns a ReaderIOResult.
//
// This is similar to SequenceReader but works with ReaderIO, which represents a computation
// that depends on an environment R and performs IO effects.
//
// Type transformation:
//
// From: ReaderIOResult[ReaderIO[R, A]]
// = func(context.Context) func() Either[error, func(R) func() A]
//
// To:   Kleisli[R, A]
//     = func(R) func(context.Context) func() Either[error, A]
//
// The key difference from SequenceReader is that the inner computation (ReaderIO) already
// performs IO effects, so the sequencing combines these effects properly.
//
// Point-free style benefits:
// - Enables composition of ReaderIO-based computations
// - Allows partial application of environment before IO execution
// - Facilitates building effect pipelines with dependency injection
// - Supports layered architecture where R represents service dependencies
//
// Example:
//
// type Database struct {
// ConnectionString string
// }
//
// // A computation that produces a ReaderIO based on context
// func getQuery(ctx context.Context) func() Either[error, func(Database) func() string] {
// return func() Either[error, func(Database) func() string] {
// return Right[error](func(db Database) func() string {
// return func() string {
// // Perform actual IO here
// return "Query result from " + db.ConnectionString
// }
// })
// }
// }
//
// // Sequence it to apply Database first
// sequenced := SequenceReaderIO[Database, string](getQuery)
//
// // Partially apply the Database
// db := Database{ConnectionString: "localhost:5432"}
// ctx := context.Background()
//	result := sequenced(db)(ctx)() // Executes IO and returns Right("Query result...")
//
// In point-free style, this enables clean composition:
//
//	var executeQuery = F.Pipe2(
//	    prepareQuery,        // ReaderIOResult[ReaderIO[Database, QueryResult]]
//	    SequenceReaderIO,    // Kleisli[Database, QueryResult]
//	    withDatabase(db),    // ReaderIOResult[QueryResult]
// )
//
//go:inline
func SequenceReaderIO[R, A any](ma ReaderIOResult[RIO.ReaderIO[R, A]]) Kleisli[R, A] {
return RIOR.SequenceReaderIO(ma)
}
// SequenceReaderResult transforms a ReaderIOResult containing a ReaderResult into a function
// that takes the ReaderResult's environment first, then returns a ReaderIOResult.
//
// This is similar to SequenceReader but works with ReaderResult, which represents a computation
// that depends on an environment R and can fail with an error.
//
// Type transformation:
//
// From: ReaderIOResult[ReaderResult[R, A]]
// = func(context.Context) func() Either[error, func(R) Either[error, A]]
//
// To:   Kleisli[R, A]
//     = func(R) func(context.Context) func() Either[error, A]
//
// The sequencing properly combines the error handling from both the outer ReaderIOResult
// and the inner ReaderResult, ensuring that errors from either level are propagated correctly.
//
// Point-free style benefits:
// - Enables composition of error-handling computations with dependency injection
// - Allows partial application of dependencies before error handling
// - Facilitates building validation pipelines with environment dependencies
// - Supports service-oriented architectures with proper error propagation
//
// Example:
//
// type Config struct {
// MaxRetries int
// }
//
// // A computation that produces a ReaderResult based on context
// func validateRetries(ctx context.Context) func() Either[error, func(Config) Either[error, int]] {
// return func() Either[error, func(Config) Either[error, int]] {
// return Right[error](func(cfg Config) Either[error, int] {
// if cfg.MaxRetries < 0 {
// return Left[int](errors.New("negative retries"))
// }
// return Right[error](cfg.MaxRetries)
// })
// }
// }
//
// // Sequence it to apply Config first
// sequenced := SequenceReaderResult[Config, int](validateRetries)
//
// // Partially apply the Config
// cfg := Config{MaxRetries: 3}
// ctx := context.Background()
//	result := sequenced(cfg)(ctx)() // Returns Right(3)
//
// // With invalid config
// badCfg := Config{MaxRetries: -1}
//	badResult := sequenced(badCfg)(ctx)() // Returns Left(error("negative retries"))
//
// In point-free style, this enables validation pipelines:
//
// var validateAndProcess = F.Flow4(
// loadConfig, // ReaderIOResult[ReaderResult[Config, Settings]]
//	    SequenceReaderResult,    // func(Config) ReaderIOResult[Settings]
//	    applyConfig(cfg),        // ReaderIOResult[Settings]
//	    Chain(processSettings),  // ReaderIOResult[Result]
// )
//
//go:inline
func SequenceReaderResult[R, A any](ma ReaderIOResult[RR.ReaderResult[R, A]]) Kleisli[R, A] {
return RIOR.SequenceReaderEither(ma)
}
// TraverseReader transforms a ReaderIOResult computation by applying a Reader-based function,
// effectively introducing a new environment dependency.
//
// This function takes a Reader-based transformation (Kleisli arrow) and returns a function that
// can transform a ReaderIOResult. The result allows you to provide the Reader's environment (R)
// first, which then produces a ReaderIOResult that depends on the context.
//
// Type transformation:
//
// From: ReaderIOResult[A]
// = func(context.Context) func() Either[error, A]
//
// With: reader.Kleisli[R, A, B]
// = func(A) func(R) B
//
// To: func(ReaderIOResult[A]) func(R) ReaderIOResult[B]
// = func(ReaderIOResult[A]) func(R) func(context.Context) func() Either[error, B]
//
// This enables:
// 1. Transforming values within a ReaderIOResult using environment-dependent logic
// 2. Introducing new environment dependencies into existing computations
// 3. Building composable pipelines where transformations depend on configuration or dependencies
// 4. Point-free style composition with Reader-based transformations
//
// Type Parameters:
// - R: The environment type that the Reader depends on
// - A: The input value type
// - B: The output value type
//
// Parameters:
// - f: A Reader-based Kleisli arrow that transforms A to B using environment R
//
// Returns:
// - A function that takes a ReaderIOResult[A] and returns a Kleisli[R, B],
// which is func(R) ReaderIOResult[B]
//
// The function preserves error handling and IO effects while adding the Reader environment dependency.
//
// Example:
//
// type Config struct {
// Multiplier int
// }
//
// // A Reader-based transformation that depends on Config
// multiply := func(x int) func(Config) int {
// return func(cfg Config) int {
// return x * cfg.Multiplier
// }
// }
//
// // Original computation that produces an int
// computation := Right[int](10)
//
// // Apply TraverseReader to introduce Config dependency
// traversed := TraverseReader[Config, int, int](multiply)
// result := traversed(computation)
//
// // Now we can provide the Config to get the final result
// cfg := Config{Multiplier: 5}
// ctx := context.Background()
// finalResult := result(cfg)(ctx)() // Returns Right(50)
//
// In point-free style, this enables clean composition:
//
// var pipeline = F.Flow3(
// loadValue, // ReaderIOResult[int]
// TraverseReader(multiplyByConfig), // func(Config) ReaderIOResult[int]
// applyConfig(cfg), // ReaderIOResult[int]
// )
//
//go:inline
func TraverseReader[R, A, B any](
f reader.Kleisli[R, A, B],
) func(ReaderIOResult[A]) Kleisli[R, B] {
return RIOR.TraverseReader[context.Context](f)
}

View File

@@ -0,0 +1,333 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerioresult_test
import (
"context"
"fmt"
RIOE "github.com/IBM/fp-go/v2/context/readerioresult"
"github.com/IBM/fp-go/v2/either"
F "github.com/IBM/fp-go/v2/function"
)
// Example_sequenceReader_basicUsage demonstrates the basic usage of SequenceReader
// to flip the parameter order, enabling point-free style programming.
func Example_sequenceReader_basicUsage() {
type Config struct {
Multiplier int
}
// A computation that produces a Reader based on context
getComputation := func(ctx context.Context) func() either.Either[error, func(Config) int] {
return func() either.Either[error, func(Config) int] {
// This could check context for cancellation, deadlines, etc.
return either.Right[error](func(cfg Config) int {
return cfg.Multiplier * 10
})
}
}
// Sequence it to flip the parameter order
// Now Config comes first, then context
sequenced := RIOE.SequenceReader(getComputation)
// Partially apply the Config - this is the key benefit for point-free style
cfg := Config{Multiplier: 5}
withConfig := sequenced(cfg)
// Now we have a ReaderIOResult[int] that can be used with any context
ctx := context.Background()
result := withConfig(ctx)()
if value, err := either.Unwrap(result); err == nil {
fmt.Println(value)
}
// Output: 50
}
// Example_sequenceReader_dependencyInjection demonstrates how SequenceReader
// enables clean dependency injection patterns in point-free style.
func Example_sequenceReader_dependencyInjection() {
// Define our dependencies
type Database struct {
ConnectionString string
}
type UserService struct {
db Database
}
// A function that creates a computation requiring a Database
makeQuery := func(ctx context.Context) func() either.Either[error, func(Database) string] {
return func() either.Either[error, func(Database) string] {
return either.Right[error](func(db Database) string {
return fmt.Sprintf("Querying %s", db.ConnectionString)
})
}
}
// Sequence to enable dependency injection
queryWithDB := RIOE.SequenceReader(makeQuery)
// Inject the database dependency
db := Database{ConnectionString: "localhost:5432"}
query := queryWithDB(db)
// Execute with context
ctx := context.Background()
result := query(ctx)()
if value, err := either.Unwrap(result); err == nil {
fmt.Println(value)
}
// Output: Querying localhost:5432
}
// Example_sequenceReader_pointFreeComposition demonstrates how SequenceReader
// enables point-free style composition of computations.
func Example_sequenceReader_pointFreeComposition() {
type Config struct {
BaseValue int
}
// Step 1: Create a computation that produces a Reader
step1 := func(ctx context.Context) func() either.Either[error, func(Config) int] {
return func() either.Either[error, func(Config) int] {
return either.Right[error](func(cfg Config) int {
return cfg.BaseValue * 2
})
}
}
// Step 2: Sequence it to enable partial application
sequenced := RIOE.SequenceReader(step1)
// Step 3: Build a pipeline using point-free style
// Partially apply the config
cfg := Config{BaseValue: 10}
// Create a reusable computation with the config baked in
computation := F.Pipe1(
sequenced(cfg),
RIOE.Map(func(x int) int { return x + 5 }),
)
// Execute the pipeline
ctx := context.Background()
result := computation(ctx)()
if value, err := either.Unwrap(result); err == nil {
fmt.Println(value)
}
// Output: 25
}
// Example_sequenceReader_multipleEnvironments demonstrates using SequenceReader
// to work with multiple environment types in a clean, composable way.
func Example_sequenceReader_multipleEnvironments() {
type DatabaseConfig struct {
Host string
Port int
}
type APIConfig struct {
Endpoint string
APIKey string
}
// Function that needs DatabaseConfig
getDatabaseURL := func(ctx context.Context) func() either.Either[error, func(DatabaseConfig) string] {
return func() either.Either[error, func(DatabaseConfig) string] {
return either.Right[error](func(cfg DatabaseConfig) string {
return fmt.Sprintf("%s:%d", cfg.Host, cfg.Port)
})
}
}
// Function that needs APIConfig
getAPIURL := func(ctx context.Context) func() either.Either[error, func(APIConfig) string] {
return func() either.Either[error, func(APIConfig) string] {
return either.Right[error](func(cfg APIConfig) string {
return cfg.Endpoint
})
}
}
// Sequence both to enable partial application
withDBConfig := RIOE.SequenceReader(getDatabaseURL)
withAPIConfig := RIOE.SequenceReader(getAPIURL)
// Partially apply different configs
dbCfg := DatabaseConfig{Host: "localhost", Port: 5432}
apiCfg := APIConfig{Endpoint: "https://api.example.com", APIKey: "secret"}
dbQuery := withDBConfig(dbCfg)
apiQuery := withAPIConfig(apiCfg)
// Execute both with the same context
ctx := context.Background()
dbResult := dbQuery(ctx)()
apiResult := apiQuery(ctx)()
if dbURL, err := either.Unwrap(dbResult); err == nil {
fmt.Println("Database:", dbURL)
}
if apiURL, err := either.Unwrap(apiResult); err == nil {
fmt.Println("API:", apiURL)
}
// Output:
// Database: localhost:5432
// API: https://api.example.com
}
// Example_sequenceReaderResult_errorHandling demonstrates how SequenceReaderResult
// enables point-free style with proper error handling at multiple levels.
func Example_sequenceReaderResult_errorHandling() {
type ValidationConfig struct {
MinValue int
MaxValue int
}
// A computation that can fail at both outer and inner levels
makeValidator := func(ctx context.Context) func() either.Either[error, func(context.Context) either.Either[error, int]] {
return func() either.Either[error, func(context.Context) either.Either[error, int]] {
// Outer level: check context
if ctx.Err() != nil {
return either.Left[func(context.Context) either.Either[error, int]](ctx.Err())
}
// Return inner computation
return either.Right[error](func(innerCtx context.Context) either.Either[error, int] {
// Inner level: perform validation
value := 42
if value < 0 {
return either.Left[int](fmt.Errorf("value too small: %d", value))
}
if value > 100 {
return either.Left[int](fmt.Errorf("value too large: %d", value))
}
return either.Right[error](value)
})
}
}
// Sequence to enable point-free composition
sequenced := RIOE.SequenceReaderResult(makeValidator)
// Build a pipeline with error handling
ctx := context.Background()
pipeline := F.Pipe2(
sequenced(ctx),
RIOE.Map(func(x int) int { return x * 2 }),
RIOE.Chain(func(x int) RIOE.ReaderIOResult[string] {
return RIOE.Of(fmt.Sprintf("Result: %d", x))
}),
)
result := pipeline(ctx)()
if value, err := either.Unwrap(result); err == nil {
fmt.Println(value)
}
// Output: Result: 84
}
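// Example_sequenceReaderIO_basicUsage is an illustrative sketch (not part of the original
// example set) of how SequenceReaderIO could be used. It assumes the same alias conventions
// as the examples above, i.e. that the inner ReaderIO[Config, string] is func(Config) func() string.
func Example_sequenceReaderIO_basicUsage() {
	type Config struct {
		Greeting string
	}
	// A computation that produces a ReaderIO based on context
	makeGreeter := func(ctx context.Context) func() either.Either[error, func(Config) func() string] {
		return func() either.Either[error, func(Config) func() string] {
			return either.Right[error](func(cfg Config) func() string {
				return func() string {
					// The IO effect only runs when the final thunk is invoked
					return cfg.Greeting + ", world"
				}
			})
		}
	}
	// Sequence it so the Config environment comes first
	sequenced := RIOE.SequenceReaderIO[Config, string](makeGreeter)
	// Partially apply the Config, then execute with a context
	cfg := Config{Greeting: "Hello"}
	ctx := context.Background()
	result := sequenced(cfg)(ctx)()
	if value, err := either.Unwrap(result); err == nil {
		fmt.Println(value)
	}
	// Output: Hello, world
}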
// Example_sequenceReader_partialApplication demonstrates the power of partial
// application enabled by SequenceReader for building reusable computations.
func Example_sequenceReader_partialApplication() {
type ServiceConfig struct {
ServiceName string
Version string
}
// Create a computation factory
makeServiceInfo := func(ctx context.Context) func() either.Either[error, func(ServiceConfig) string] {
return func() either.Either[error, func(ServiceConfig) string] {
return either.Right[error](func(cfg ServiceConfig) string {
return fmt.Sprintf("%s v%s", cfg.ServiceName, cfg.Version)
})
}
}
// Sequence it
sequenced := RIOE.SequenceReader(makeServiceInfo)
// Create multiple service configurations
authConfig := ServiceConfig{ServiceName: "AuthService", Version: "1.0.0"}
userConfig := ServiceConfig{ServiceName: "UserService", Version: "2.1.0"}
// Partially apply each config to create specialized computations
getAuthInfo := sequenced(authConfig)
getUserInfo := sequenced(userConfig)
// These can now be reused across different contexts
ctx := context.Background()
authResult := getAuthInfo(ctx)()
userResult := getUserInfo(ctx)()
if auth, err := either.Unwrap(authResult); err == nil {
fmt.Println(auth)
}
if user, err := either.Unwrap(userResult); err == nil {
fmt.Println(user)
}
// Output:
// AuthService v1.0.0
// UserService v2.1.0
}
// Example_sequenceReader_testingBenefits demonstrates how SequenceReader
// makes testing easier by allowing you to inject test dependencies.
func Example_sequenceReader_testingBenefits() {
// Simple logger that collects messages
type SimpleLogger struct {
Messages []string
}
// A computation that depends on a logger (using the struct directly)
makeLoggingOperation := func(ctx context.Context) func() either.Either[error, func(*SimpleLogger) string] {
return func() either.Either[error, func(*SimpleLogger) string] {
return either.Right[error](func(logger *SimpleLogger) string {
logger.Messages = append(logger.Messages, "Operation started")
result := "Success"
logger.Messages = append(logger.Messages, fmt.Sprintf("Operation completed: %s", result))
return result
})
}
}
// Sequence to enable dependency injection
sequenced := RIOE.SequenceReader(makeLoggingOperation)
// Inject a test logger
testLogger := &SimpleLogger{Messages: []string{}}
operation := sequenced(testLogger)
// Execute
ctx := context.Background()
result := operation(ctx)()
if value, err := either.Unwrap(result); err == nil {
fmt.Println("Result:", value)
fmt.Println("Logs:", len(testLogger.Messages))
}
// Output:
// Result: Success
// Logs: 2
}
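// Example_traverseReader_configDependency is an illustrative sketch (not part of the original
// example set) showing how TraverseReader introduces an environment dependency into an
// existing ReaderIOResult; the Config type and values are made up for illustration.
func Example_traverseReader_configDependency() {
	type Config struct {
		Multiplier int
	}
	// A Reader-based transformation that depends on Config
	multiply := func(x int) func(Config) int {
		return func(cfg Config) int {
			return x * cfg.Multiplier
		}
	}
	// Original computation producing an int
	computation := RIOE.Of(10)
	// Introduce the Config dependency, then partially apply it
	withConfig := RIOE.TraverseReader[Config, int, int](multiply)(computation)
	cfg := Config{Multiplier: 5}
	ctx := context.Background()
	result := withConfig(cfg)(ctx)()
	if value, err := either.Unwrap(result); err == nil {
		fmt.Println(value)
	}
	// Output: 50
}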

View File

@@ -0,0 +1,866 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerioresult
import (
"context"
"errors"
"fmt"
"testing"
"github.com/IBM/fp-go/v2/either"
"github.com/stretchr/testify/assert"
)
func TestSequenceReader(t *testing.T) {
t.Run("flips parameter order for simple types", func(t *testing.T) {
// Original: ReaderIOResult[Reader[string, int]]
// = func(context.Context) func() Either[error, func(string) int]
original := func(ctx context.Context) func() Either[Reader[string, int]] {
return func() Either[Reader[string, int]] {
return either.Right[error](func(s string) int {
return 10 + len(s)
})
}
}
// Sequenced: func(string) func(context.Context) IOResult[int]
// The Reader environment (string) is now the first parameter
sequenced := SequenceReader(original)
ctx := context.Background()
// Test original
result1 := original(ctx)()
assert.True(t, either.IsRight(result1))
innerFunc1, _ := either.Unwrap(result1)
value1 := innerFunc1("hello")
assert.Equal(t, 15, value1)
// Test sequenced - note the flipped order: string first, then context
result2 := sequenced("hello")(ctx)()
assert.True(t, either.IsRight(result2))
value2, _ := either.Unwrap(result2)
assert.Equal(t, 15, value2)
})
t.Run("flips parameter order for struct types", func(t *testing.T) {
type Database struct {
ConnectionString string
}
// Original: ReaderIOResult[Reader[Database, string]]
query := func(ctx context.Context) func() Either[Reader[Database, string]] {
return func() Either[Reader[Database, string]] {
if ctx.Err() != nil {
return either.Left[Reader[Database, string]](ctx.Err())
}
return either.Right[error](func(db Database) string {
return fmt.Sprintf("Query on %s", db.ConnectionString)
})
}
}
db := Database{ConnectionString: "localhost:5432"}
ctx := context.Background()
expected := "Query on localhost:5432"
// Sequence it
sequenced := SequenceReader(query)
// Test original with valid inputs
result1 := query(ctx)()
assert.True(t, either.IsRight(result1))
innerFunc1, _ := either.Unwrap(result1)
value1 := innerFunc1(db)
assert.Equal(t, expected, value1)
// Test sequenced with valid inputs - Database first, then context
result2 := sequenced(db)(ctx)()
assert.True(t, either.IsRight(result2))
value2, _ := either.Unwrap(result2)
assert.Equal(t, expected, value2)
})
t.Run("preserves outer error", func(t *testing.T) {
expectedError := errors.New("outer error")
// Original that fails at outer level
original := func(ctx context.Context) func() Either[Reader[string, int]] {
return func() Either[Reader[string, int]] {
return either.Left[Reader[string, int]](expectedError)
}
}
ctx := context.Background()
// Test original with error
result1 := original(ctx)()
assert.True(t, either.IsLeft(result1))
_, err1 := either.Unwrap(result1)
assert.Equal(t, expectedError, err1)
// Test sequenced - the outer error is preserved
sequenced := SequenceReader(original)
result2 := sequenced("test")(ctx)()
assert.True(t, either.IsLeft(result2))
_, err2 := either.Unwrap(result2)
assert.Equal(t, expectedError, err2)
})
t.Run("preserves computation logic", func(t *testing.T) {
// Original function
original := func(ctx context.Context) func() Either[Reader[string, int]] {
return func() Either[Reader[string, int]] {
return either.Right[error](func(s string) int {
return 3 * len(s)
})
}
}
ctx := context.Background()
// Sequence
sequenced := SequenceReader(original)
// Test that sequence produces correct results
result1 := original(ctx)()
innerFunc1, _ := either.Unwrap(result1)
value1 := innerFunc1("test")
result2 := sequenced("test")(ctx)()
value2, _ := either.Unwrap(result2)
assert.Equal(t, value1, value2)
assert.Equal(t, 12, value2) // 3 * 4
})
t.Run("works with zero values", func(t *testing.T) {
original := func(ctx context.Context) func() Either[Reader[string, int]] {
return func() Either[Reader[string, int]] {
return either.Right[error](func(s string) int {
return len(s)
})
}
}
ctx := context.Background()
sequenced := SequenceReader(original)
// Test with zero values
result1 := original(ctx)()
innerFunc1, _ := either.Unwrap(result1)
value1 := innerFunc1("")
assert.Equal(t, 0, value1)
result2 := sequenced("")(ctx)()
value2, _ := either.Unwrap(result2)
assert.Equal(t, 0, value2)
})
t.Run("respects context cancellation", func(t *testing.T) {
original := func(ctx context.Context) func() Either[Reader[string, int]] {
return func() Either[Reader[string, int]] {
if ctx.Err() != nil {
return either.Left[Reader[string, int]](ctx.Err())
}
return either.Right[error](func(s string) int {
return len(s)
})
}
}
ctx, cancel := context.WithCancel(context.Background())
cancel()
sequenced := SequenceReader(original)
result := sequenced("test")(ctx)()
assert.True(t, either.IsLeft(result))
_, err := either.Unwrap(result)
assert.Equal(t, context.Canceled, err)
})
t.Run("enables point-free style with partial application", func(t *testing.T) {
type Config struct {
Multiplier int
}
// Original computation
original := func(ctx context.Context) func() Either[Reader[Config, int]] {
return func() Either[Reader[Config, int]] {
return either.Right[error](func(cfg Config) int {
return cfg.Multiplier * 10
})
}
}
// Sequence to enable partial application
sequenced := SequenceReader(original)
// Partially apply the Config
cfg := Config{Multiplier: 5}
withConfig := sequenced(cfg)
// Now we have a ReaderIOResult[int] that can be used in different contexts
ctx1 := context.Background()
result1 := withConfig(ctx1)()
assert.True(t, either.IsRight(result1))
value1, _ := either.Unwrap(result1)
assert.Equal(t, 50, value1)
// Can reuse with different context
ctx2 := context.Background()
result2 := withConfig(ctx2)()
assert.True(t, either.IsRight(result2))
value2, _ := either.Unwrap(result2)
assert.Equal(t, 50, value2)
})
}
func TestSequenceReaderIO(t *testing.T) {
t.Run("flips parameter order for simple types", func(t *testing.T) {
// Original: ReaderIOResult[ReaderIO[int]]
// = func(context.Context) func() Either[error, func(context.Context) func() int]
original := func(ctx context.Context) func() Either[ReaderIO[int]] {
return func() Either[ReaderIO[int]] {
return either.Right[error](func(innerCtx context.Context) func() int {
return func() int {
return 20
}
})
}
}
ctx := context.Background()
sequenced := SequenceReaderIO(original)
// Test original
result1 := original(ctx)()
assert.True(t, either.IsRight(result1))
innerFunc1, _ := either.Unwrap(result1)
value1 := innerFunc1(ctx)()
assert.Equal(t, 20, value1)
// Test sequenced - the inner ReaderIO's environment (here a context) first, then the outer context
result2 := sequenced(ctx)(ctx)()
assert.True(t, either.IsRight(result2))
value2, _ := either.Unwrap(result2)
assert.Equal(t, 20, value2)
})
t.Run("preserves outer error", func(t *testing.T) {
expectedError := errors.New("outer error")
// Original that fails at outer level
original := func(ctx context.Context) func() Either[ReaderIO[int]] {
return func() Either[ReaderIO[int]] {
return either.Left[ReaderIO[int]](expectedError)
}
}
ctx := context.Background()
// Test original with error
result1 := original(ctx)()
assert.True(t, either.IsLeft(result1))
_, err1 := either.Unwrap(result1)
assert.Equal(t, expectedError, err1)
// Test sequenced - the outer error is preserved
sequenced := SequenceReaderIO(original)
result2 := sequenced(ctx)(ctx)()
assert.True(t, either.IsLeft(result2))
_, err2 := either.Unwrap(result2)
assert.Equal(t, expectedError, err2)
})
t.Run("respects context cancellation in outer context", func(t *testing.T) {
original := func(ctx context.Context) func() Either[ReaderIO[int]] {
return func() Either[ReaderIO[int]] {
if ctx.Err() != nil {
return either.Left[ReaderIO[int]](ctx.Err())
}
return either.Right[error](func(innerCtx context.Context) func() int {
return func() int {
return 20
}
})
}
}
ctx, cancel := context.WithCancel(context.Background())
cancel()
sequenced := SequenceReaderIO(original)
result := sequenced(ctx)(ctx)()
assert.True(t, either.IsLeft(result))
_, err := either.Unwrap(result)
assert.Equal(t, context.Canceled, err)
})
}
func TestSequenceReaderResult(t *testing.T) {
t.Run("flips parameter order for simple types", func(t *testing.T) {
// Original: ReaderIOResult[ReaderResult[int]]
// = func(context.Context) func() Either[error, func(context.Context) Either[error, int]]
original := func(ctx context.Context) func() Either[ReaderResult[int]] {
return func() Either[ReaderResult[int]] {
return either.Right[error](func(innerCtx context.Context) Either[int] {
return either.Right[error](20)
})
}
}
ctx := context.Background()
sequenced := SequenceReaderResult(original)
// Test original
result1 := original(ctx)()
assert.True(t, either.IsRight(result1))
innerFunc1, _ := either.Unwrap(result1)
innerResult1 := innerFunc1(ctx)
assert.True(t, either.IsRight(innerResult1))
value1, _ := either.Unwrap(innerResult1)
assert.Equal(t, 20, value1)
// Test sequenced
result2 := sequenced(ctx)(ctx)()
assert.True(t, either.IsRight(result2))
value2, _ := either.Unwrap(result2)
assert.Equal(t, 20, value2)
})
t.Run("preserves outer error", func(t *testing.T) {
expectedError := errors.New("outer error")
// Original that fails at outer level
original := func(ctx context.Context) func() Either[ReaderResult[int]] {
return func() Either[ReaderResult[int]] {
return either.Left[ReaderResult[int]](expectedError)
}
}
ctx := context.Background()
// Test original with error
result1 := original(ctx)()
assert.True(t, either.IsLeft(result1))
_, err1 := either.Unwrap(result1)
assert.Equal(t, expectedError, err1)
// Test sequenced - the outer error is preserved
sequenced := SequenceReaderResult(original)
result2 := sequenced(ctx)(ctx)()
assert.True(t, either.IsLeft(result2))
_, err2 := either.Unwrap(result2)
assert.Equal(t, expectedError, err2)
})
t.Run("preserves inner error", func(t *testing.T) {
expectedError := errors.New("inner error")
// Original that fails at inner level
original := func(ctx context.Context) func() Either[ReaderResult[int]] {
return func() Either[ReaderResult[int]] {
return either.Right[error](func(innerCtx context.Context) Either[int] {
return either.Left[int](expectedError)
})
}
}
ctx := context.Background()
// Test original with inner error
result1 := original(ctx)()
assert.True(t, either.IsRight(result1))
innerFunc1, _ := either.Unwrap(result1)
innerResult1 := innerFunc1(ctx)
assert.True(t, either.IsLeft(innerResult1))
_, innerErr1 := either.Unwrap(innerResult1)
assert.Equal(t, expectedError, innerErr1)
// Test sequenced with inner error
sequenced := SequenceReaderResult(original)
result2 := sequenced(ctx)(ctx)()
assert.True(t, either.IsLeft(result2))
_, innerErr2 := either.Unwrap(result2)
assert.Equal(t, expectedError, innerErr2)
})
t.Run("handles errors at different levels", func(t *testing.T) {
// Original that can fail at both levels
makeOriginal := func(x int) ReaderIOResult[ReaderResult[int]] {
return func(ctx context.Context) func() Either[ReaderResult[int]] {
return func() Either[ReaderResult[int]] {
if x < -10 {
return either.Left[ReaderResult[int]](errors.New("outer: too negative"))
}
return either.Right[error](func(innerCtx context.Context) Either[int] {
if x < 0 {
return either.Left[int](errors.New("inner: negative value"))
}
return either.Right[error](x * 2)
})
}
}
}
ctx := context.Background()
// Test outer error
sequenced1 := SequenceReaderResult(makeOriginal(-20))
result1 := sequenced1(ctx)(ctx)()
assert.True(t, either.IsLeft(result1))
_, err1 := either.Unwrap(result1)
assert.Contains(t, err1.Error(), "outer")
// Test inner error
sequenced2 := SequenceReaderResult(makeOriginal(-5))
result2 := sequenced2(ctx)(ctx)()
assert.True(t, either.IsLeft(result2))
_, err2 := either.Unwrap(result2)
assert.Contains(t, err2.Error(), "inner")
// Test success
sequenced3 := SequenceReaderResult(makeOriginal(10))
result3 := sequenced3(ctx)(ctx)()
assert.True(t, either.IsRight(result3))
value3, _ := either.Unwrap(result3)
assert.Equal(t, 20, value3)
})
t.Run("respects context cancellation", func(t *testing.T) {
original := func(ctx context.Context) func() Either[ReaderResult[int]] {
return func() Either[ReaderResult[int]] {
if ctx.Err() != nil {
return either.Left[ReaderResult[int]](ctx.Err())
}
return either.Right[error](func(innerCtx context.Context) Either[int] {
if innerCtx.Err() != nil {
return either.Left[int](innerCtx.Err())
}
return either.Right[error](20)
})
}
}
ctx, cancel := context.WithCancel(context.Background())
cancel()
sequenced := SequenceReaderResult(original)
result := sequenced(ctx)(ctx)()
assert.True(t, either.IsLeft(result))
_, err := either.Unwrap(result)
assert.Equal(t, context.Canceled, err)
})
}
func TestSequenceEdgeCases(t *testing.T) {
t.Run("works with empty struct", func(t *testing.T) {
type Empty struct{}
original := func(ctx context.Context) func() Either[Reader[Empty, int]] {
return func() Either[Reader[Empty, int]] {
return either.Right[error](func(e Empty) int {
return 20
})
}
}
ctx := context.Background()
empty := Empty{}
sequenced := SequenceReader(original)
result1 := original(ctx)()
innerFunc1, _ := either.Unwrap(result1)
value1 := innerFunc1(empty)
assert.Equal(t, 20, value1)
result2 := sequenced(empty)(ctx)()
value2, _ := either.Unwrap(result2)
assert.Equal(t, 20, value2)
})
t.Run("works with pointer types", func(t *testing.T) {
type Data struct {
Value int
}
original := func(ctx context.Context) func() Either[Reader[*Data, int]] {
return func() Either[Reader[*Data, int]] {
return either.Right[error](func(d *Data) int {
if d == nil {
return 42
}
return 42 + d.Value
})
}
}
ctx := context.Background()
data := &Data{Value: 100}
sequenced := SequenceReader(original)
// Test with non-nil pointer
result1 := original(ctx)()
innerFunc1, _ := either.Unwrap(result1)
value1 := innerFunc1(data)
assert.Equal(t, 142, value1)
result2 := sequenced(data)(ctx)()
value2, _ := either.Unwrap(result2)
assert.Equal(t, 142, value2)
// Test with nil pointer
result3 := sequenced(nil)(ctx)()
value3, _ := either.Unwrap(result3)
assert.Equal(t, 42, value3)
})
t.Run("maintains referential transparency", func(t *testing.T) {
// The same inputs should always produce the same outputs
original := func(ctx context.Context) func() Either[Reader[string, int]] {
return func() Either[Reader[string, int]] {
return either.Right[error](func(s string) int {
return 10 + len(s)
})
}
}
ctx := context.Background()
sequenced := SequenceReader(original)
// Call multiple times with same inputs
for range 5 {
result1 := original(ctx)()
innerFunc1, _ := either.Unwrap(result1)
value1 := innerFunc1("hello")
assert.Equal(t, 15, value1)
result2 := sequenced("hello")(ctx)()
value2, _ := either.Unwrap(result2)
assert.Equal(t, 15, value2)
}
})
}
func TestTraverseReader(t *testing.T) {
t.Run("basic transformation with Reader dependency", func(t *testing.T) {
type Config struct {
Multiplier int
}
// Original computation
original := Right(10)
// Reader-based transformation
multiply := func(x int) Reader[Config, int] {
return func(cfg Config) int {
return x * cfg.Multiplier
}
}
// Apply TraverseReader
traversed := TraverseReader(multiply)
result := traversed(original)
// Provide Config and execute
cfg := Config{Multiplier: 5}
ctx := context.Background()
finalResult := result(cfg)(ctx)()
assert.True(t, either.IsRight(finalResult))
value, _ := either.Unwrap(finalResult)
assert.Equal(t, 50, value)
})
t.Run("preserves outer error", func(t *testing.T) {
type Config struct {
Multiplier int
}
expectedError := errors.New("computation failed")
// Original computation that fails
original := Left[int](expectedError)
// Reader-based transformation (won't be called)
multiply := func(x int) Reader[Config, int] {
return func(cfg Config) int {
return x * cfg.Multiplier
}
}
// Apply TraverseReader
traversed := TraverseReader(multiply)
result := traversed(original)
// Provide Config and execute
cfg := Config{Multiplier: 5}
ctx := context.Background()
finalResult := result(cfg)(ctx)()
assert.True(t, either.IsLeft(finalResult))
_, err := either.Unwrap(finalResult)
assert.Equal(t, expectedError, err)
})
t.Run("works with different types", func(t *testing.T) {
type Database struct {
Prefix string
}
// Original computation producing an int
original := Right(42)
// Reader-based transformation: int -> string using Database
format := func(x int) func(Database) string {
return func(db Database) string {
return fmt.Sprintf("%s:%d", db.Prefix, x)
}
}
// Apply TraverseReader
traversed := TraverseReader(format)
result := traversed(original)
// Provide Database and execute
db := Database{Prefix: "ID"}
ctx := context.Background()
finalResult := result(db)(ctx)()
assert.True(t, either.IsRight(finalResult))
value, _ := either.Unwrap(finalResult)
assert.Equal(t, "ID:42", value)
})
t.Run("works with struct environments", func(t *testing.T) {
type Settings struct {
Prefix string
Suffix string
}
// Original computation
original := Right("value")
// Reader-based transformation using Settings
decorate := func(s string) func(Settings) string {
return func(settings Settings) string {
return settings.Prefix + s + settings.Suffix
}
}
// Apply TraverseReader
traversed := TraverseReader(decorate)
result := traversed(original)
// Provide Settings and execute
settings := Settings{Prefix: "[", Suffix: "]"}
ctx := context.Background()
finalResult := result(settings)(ctx)()
assert.True(t, either.IsRight(finalResult))
value, _ := either.Unwrap(finalResult)
assert.Equal(t, "[value]", value)
})
t.Run("enables partial application", func(t *testing.T) {
type Config struct {
Factor int
}
// Original computation
original := Right(10)
// Reader-based transformation
scale := func(x int) Reader[Config, int] {
return func(cfg Config) int {
return x * cfg.Factor
}
}
// Apply TraverseReader
traversed := TraverseReader(scale)
result := traversed(original)
// Partially apply Config
cfg := Config{Factor: 3}
withConfig := result(cfg)
// Can now use with different contexts
ctx1 := context.Background()
finalResult1 := withConfig(ctx1)()
assert.True(t, either.IsRight(finalResult1))
value1, _ := either.Unwrap(finalResult1)
assert.Equal(t, 30, value1)
// Reuse with different context
ctx2 := context.Background()
finalResult2 := withConfig(ctx2)()
assert.True(t, either.IsRight(finalResult2))
value2, _ := either.Unwrap(finalResult2)
assert.Equal(t, 30, value2)
})
t.Run("respects context cancellation", func(t *testing.T) {
type Config struct {
Value int
}
// Original computation that checks context
original := func(ctx context.Context) func() Either[int] {
return func() Either[int] {
if ctx.Err() != nil {
return either.Left[int](ctx.Err())
}
return either.Right[error](10)
}
}
// Reader-based transformation
multiply := func(x int) Reader[Config, int] {
return func(cfg Config) int {
return x * cfg.Value
}
}
// Apply TraverseReader
traversed := TraverseReader(multiply)
result := traversed(original)
// Use canceled context
ctx, cancel := context.WithCancel(context.Background())
cancel()
cfg := Config{Value: 5}
finalResult := result(cfg)(ctx)()
assert.True(t, either.IsLeft(finalResult))
_, err := either.Unwrap(finalResult)
assert.Equal(t, context.Canceled, err)
})
t.Run("works with zero values", func(t *testing.T) {
type Config struct {
Offset int
}
// Original computation with zero value
original := Right(0)
// Reader-based transformation
add := func(x int) Reader[Config, int] {
return func(cfg Config) int {
return x + cfg.Offset
}
}
// Apply TraverseReader
traversed := TraverseReader(add)
result := traversed(original)
// Provide Config with zero offset
cfg := Config{Offset: 0}
ctx := context.Background()
finalResult := result(cfg)(ctx)()
assert.True(t, either.IsRight(finalResult))
value, _ := either.Unwrap(finalResult)
assert.Equal(t, 0, value)
})
t.Run("chains multiple transformations", func(t *testing.T) {
type Config struct {
Multiplier int
}
// Original computation
original := Right(5)
// First Reader-based transformation
multiply := func(x int) Reader[Config, int] {
return func(cfg Config) int {
return x * cfg.Multiplier
}
}
// Apply TraverseReader
traversed := TraverseReader(multiply)
result := traversed(original)
// Provide Config and execute
cfg := Config{Multiplier: 4}
ctx := context.Background()
finalResult := result(cfg)(ctx)()
assert.True(t, either.IsRight(finalResult))
value, _ := either.Unwrap(finalResult)
assert.Equal(t, 20, value) // 5 * 4 = 20
})
t.Run("works with complex Reader logic", func(t *testing.T) {
type ValidationRules struct {
MinValue int
MaxValue int
}
// Original computation
original := Right(50)
// Reader-based transformation with validation logic
validate := func(x int) func(ValidationRules) int {
return func(rules ValidationRules) int {
if x < rules.MinValue {
return rules.MinValue
}
if x > rules.MaxValue {
return rules.MaxValue
}
return x
}
}
// Apply TraverseReader
traversed := TraverseReader(validate)
result := traversed(original)
// Test with value within range
rules1 := ValidationRules{MinValue: 0, MaxValue: 100}
ctx := context.Background()
finalResult1 := result(rules1)(ctx)()
assert.True(t, either.IsRight(finalResult1))
value1, _ := either.Unwrap(finalResult1)
assert.Equal(t, 50, value1)
// Test with value above max
rules2 := ValidationRules{MinValue: 0, MaxValue: 30}
finalResult2 := result(rules2)(ctx)()
assert.True(t, either.IsRight(finalResult2))
value2, _ := either.Unwrap(finalResult2)
assert.Equal(t, 30, value2) // Clamped to max
// Test with value below min
rules3 := ValidationRules{MinValue: 60, MaxValue: 100}
finalResult3 := result(rules3)(ctx)()
assert.True(t, either.IsRight(finalResult3))
value3, _ := either.Unwrap(finalResult3)
assert.Equal(t, 60, value3) // Clamped to min
})
}
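// TestTraverseReaderComposition is an illustrative sketch (not part of the original test
// set) that applies two Reader-based transformations in sequence by calling TraverseReader
// twice; it relies on the same package-level aliases as the tests above.
func TestTraverseReaderComposition(t *testing.T) {
	type Config struct {
		Multiplier int
		Offset     int
	}
	multiply := func(x int) Reader[Config, int] {
		return func(cfg Config) int {
			return x * cfg.Multiplier
		}
	}
	add := func(x int) Reader[Config, int] {
		return func(cfg Config) int {
			return x + cfg.Offset
		}
	}
	cfg := Config{Multiplier: 4, Offset: 1}
	ctx := context.Background()
	// The first traversal introduces the Config dependency ...
	step1 := TraverseReader(multiply)(Right(5))(cfg) // ReaderIOResult[int] yielding 20
	// ... the second one transforms the intermediate ReaderIOResult
	step2 := TraverseReader(add)(step1)(cfg) // ReaderIOResult[int] yielding 21
	result := step2(ctx)()
	assert.True(t, either.IsRight(result))
	value, _ := either.Unwrap(result)
	assert.Equal(t, 21, value)
}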

View File

@@ -73,7 +73,7 @@ type (
// It wraps a standard http.Client and provides functional HTTP operations.
client struct {
delegate *http.Client
doIOE func(*http.Request) IOE.IOEither[error, *http.Response]
doIOE IOE.Kleisli[error, *http.Request, *http.Response]
}
)
@@ -158,7 +158,7 @@ func MakeClient(httpClient *http.Client) Client {
// request := MakeGetRequest("https://api.example.com/data")
// fullResp := ReadFullResponse(client)(request)
// result := fullResp(context.Background())()
func ReadFullResponse(client Client) func(Requester) RIOE.ReaderIOResult[H.FullResponse] {
func ReadFullResponse(client Client) RIOE.Kleisli[Requester, H.FullResponse] {
return func(req Requester) RIOE.ReaderIOResult[H.FullResponse] {
return F.Flow3(
client.Do(req),
@@ -195,7 +195,7 @@ func ReadFullResponse(client Client) func(Requester) RIOE.ReaderIOResult[H.FullR
// request := MakeGetRequest("https://api.example.com/data")
// readBytes := ReadAll(client)
// result := readBytes(request)(context.Background())()
func ReadAll(client Client) func(Requester) RIOE.ReaderIOResult[[]byte] {
func ReadAll(client Client) RIOE.Kleisli[Requester, []byte] {
return F.Flow2(
ReadFullResponse(client),
RIOE.Map(H.Body),
@@ -219,7 +219,7 @@ func ReadAll(client Client) func(Requester) RIOE.ReaderIOResult[[]byte] {
// request := MakeGetRequest("https://api.example.com/text")
// readText := ReadText(client)
// result := readText(request)(context.Background())()
func ReadText(client Client) func(Requester) RIOE.ReaderIOResult[string] {
func ReadText(client Client) RIOE.Kleisli[Requester, string] {
return F.Flow2(
ReadAll(client),
RIOE.Map(B.ToString),
@@ -231,7 +231,7 @@ func ReadText(client Client) func(Requester) RIOE.ReaderIOResult[string] {
// Deprecated: Use [ReadJSON] instead. This function is kept for backward compatibility
// but will be removed in a future version. The capitalized version follows Go naming
// conventions for acronyms.
func ReadJson[A any](client Client) func(Requester) RIOE.ReaderIOResult[A] {
func ReadJson[A any](client Client) RIOE.Kleisli[Requester, A] {
return ReadJSON[A](client)
}
@@ -242,7 +242,7 @@ func ReadJson[A any](client Client) func(Requester) RIOE.ReaderIOResult[A] {
// 3. Reads the response body as bytes
//
// This function is used internally by ReadJSON to ensure proper JSON response handling.
func readJSON(client Client) func(Requester) RIOE.ReaderIOResult[[]byte] {
func readJSON(client Client) RIOE.Kleisli[Requester, []byte] {
return F.Flow3(
ReadFullResponse(client),
RIOE.ChainFirstEitherK(F.Flow2(
@@ -278,7 +278,7 @@ func readJSON(client Client) func(Requester) RIOE.ReaderIOResult[[]byte] {
// request := MakeGetRequest("https://api.example.com/user/1")
// readUser := ReadJSON[User](client)
// result := readUser(request)(context.Background())()
func ReadJSON[A any](client Client) func(Requester) RIOE.ReaderIOResult[A] {
func ReadJSON[A any](client Client) RIOE.Kleisli[Requester, A] {
return F.Flow2(
readJSON(client),
RIOE.ChainEitherK(J.Unmarshal[A]),

View File

@@ -0,0 +1,732 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package readerioresult provides logging utilities for ReaderIOResult computations.
// It includes functions for entry/exit logging with timing, correlation IDs, and context management.
package readerioresult
import (
"context"
"log/slog"
"sync/atomic"
"time"
"github.com/IBM/fp-go/v2/context/readerio"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/io"
"github.com/IBM/fp-go/v2/logging"
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/reader"
"github.com/IBM/fp-go/v2/result"
)
type (
// loggingContextKeyType is the type used as a key for storing logging information in context.Context
loggingContextKeyType int
// LoggingID is a unique identifier assigned to each logged operation for correlation
LoggingID uint64
// loggingContext holds the logging state for a computation, including timing,
// correlation ID, logger instance, and whether logging is enabled.
loggingContext struct {
contextID LoggingID // Unique identifier for this logged operation
startTime time.Time // When the operation started (for duration calculation)
logger *slog.Logger // The logger instance to use for this operation
isEnabled bool // Whether logging is enabled for this operation
}
)
var (
// loggingContextKey is the singleton key used to store/retrieve logging data from context
loggingContextKey loggingContextKeyType
// loggingCounter is an atomic counter that generates unique LoggingIDs
loggingCounter atomic.Uint64
loggingContextValue = F.Bind2nd(context.Context.Value, any(loggingContextKey))
withLoggingContextValue = F.Bind2of3(context.WithValue)(any(loggingContextKey))
// getLoggingContext retrieves the logging context (start time, ID, logger and enabled flag)
// from the context.Context. If the context does not carry logging information, it falls
// back to a default logging context that uses the global logger and has logging disabled.
getLoggingContext = F.Flow3(
loggingContextValue,
option.ToType[loggingContext],
option.GetOrElse(getDefaultLoggingContext),
)
)
// getDefaultLoggingContext returns a default logging context with the global logger.
// This is used when no logging context is found in the context.Context.
func getDefaultLoggingContext() loggingContext {
return loggingContext{
logger: logging.GetLogger(),
}
}
// withLoggingContext creates an endomorphism that adds a logging context to a context.Context.
// This is used internally to store logging state in the context for retrieval by nested operations.
//
// Parameters:
// - lctx: The logging context to store
//
// Returns:
// - An endomorphism that adds the logging context to a context.Context
func withLoggingContext(lctx loggingContext) Endomorphism[context.Context] {
return F.Bind2nd(withLoggingContextValue, any(lctx))
}
// LogEntryExitF creates a customizable operator that wraps a ReaderIOResult computation with entry/exit callbacks.
//
// This is a more flexible version of LogEntryExit that allows you to provide custom callbacks for
// entry and exit events. The onEntry callback receives the current context and can return a modified
// context (e.g., with additional logging information). The onExit callback receives the computation
// result and can perform custom logging, metrics collection, or cleanup.
//
// The function uses the bracket pattern to ensure that:
// - The onEntry callback is executed before the computation starts
// - The computation runs with the context returned by onEntry
// - The onExit callback is executed after the computation completes (success or failure)
// - The original result is preserved and returned unchanged
// - Cleanup happens even if the computation fails
//
// Type Parameters:
// - A: The success type of the ReaderIOResult
// - ANY: The return type of the onExit callback (typically any)
//
// Parameters:
// - onEntry: A ReaderIO that receives the current context and returns a (possibly modified) context.
// This is executed before the computation starts. Use this for logging entry, adding context values,
// starting timers, or initialization logic.
// - onExit: A Kleisli function that receives the Result[A] and returns a ReaderIO[ANY].
// This is executed after the computation completes, regardless of success or failure.
// Use this for logging exit, recording metrics, cleanup, or finalization logic.
//
// Returns:
// - An Operator that wraps the ReaderIOResult computation with the custom entry/exit callbacks
//
// Example with custom context modification:
//
// type RequestID string
//
// logOp := LogEntryExitF[User, any](
// func(ctx context.Context) IO[context.Context] {
// return func() context.Context {
// reqID := RequestID(uuid.New().String())
// log.Printf("[%s] Starting operation", reqID)
// return context.WithValue(ctx, "requestID", reqID)
// }
// },
// func(res Result[User]) ReaderIO[any] {
// return func(ctx context.Context) IO[any] {
// return func() any {
// reqID := ctx.Value("requestID").(RequestID)
// return F.Pipe1(
// res,
// result.Fold(
// func(err error) any {
// log.Printf("[%s] Operation failed: %v", reqID, err)
// return nil
// },
// func(_ User) any {
// log.Printf("[%s] Operation succeeded", reqID)
// return nil
// },
// ),
// )
// }
// }
// },
// )
//
// wrapped := logOp(fetchUser(123))
//
// Example with metrics collection:
//
// import "github.com/prometheus/client_golang/prometheus"
//
// metricsOp := LogEntryExitF[Response, any](
// func(ctx context.Context) IO[context.Context] {
// return func() context.Context {
// requestCount.WithLabelValues("api_call", "started").Inc()
// return context.WithValue(ctx, "startTime", time.Now())
// }
// },
// func(res Result[Response]) ReaderIO[any] {
// return func(ctx context.Context) IO[any] {
// return func() any {
// startTime := ctx.Value("startTime").(time.Time)
// duration := time.Since(startTime).Seconds()
//
// return F.Pipe1(
// res,
// result.Fold(
// func(err error) any {
// requestCount.WithLabelValues("api_call", "error").Inc()
// requestDuration.WithLabelValues("api_call", "error").Observe(duration)
// return nil
// },
// func(_ Response) any {
// requestCount.WithLabelValues("api_call", "success").Inc()
// requestDuration.WithLabelValues("api_call", "success").Observe(duration)
// return nil
// },
// ),
// )
// }
// }
// },
// )
//
// Use Cases:
// - Custom context modification: Adding request IDs, trace IDs, or other context values
// - Structured logging: Integration with zap, logrus, or other structured loggers
// - Metrics collection: Recording operation durations, success/failure rates
// - Distributed tracing: OpenTelemetry, Jaeger integration
// - Custom monitoring: Application-specific monitoring and alerting
//
// Note: LogEntryExit is implemented using LogEntryExitF with standard logging and context management.
// Use LogEntryExitF when you need more control over the entry/exit behavior or context modification.
func LogEntryExitF[A, ANY any](
onEntry ReaderIO[context.Context],
onExit readerio.Kleisli[Result[A], ANY],
) Operator[A, A] {
bracket := F.Bind13of3(readerio.Bracket[context.Context, Result[A], ANY])(onEntry, func(newCtx context.Context, res Result[A]) ReaderIO[ANY] {
return readerio.FromIO(onExit(res)(newCtx)) // Get the exit callback for this result
})
return func(src ReaderIOResult[A]) ReaderIOResult[A] {
return bracket(F.Flow2(
src,
FromIOResult,
))
}
}
// onEntry creates a ReaderIO that handles the entry logging for an operation.
// It generates a unique logging ID, captures the start time, and logs the entry message.
// The logging context is stored in the context.Context for later retrieval.
//
// Parameters:
// - logLevel: The slog.Level to use for logging (e.g., slog.LevelInfo, slog.LevelDebug)
// - cb: Callback function to retrieve the logger from the context
// - nameAttr: The slog.Attr containing the operation name
//
// Returns:
// - A ReaderIO that prepares the context with logging information and logs the entry
func onEntry(
logLevel slog.Level,
cb func(context.Context) *slog.Logger,
nameAttr slog.Attr,
) ReaderIO[context.Context] {
return func(ctx context.Context) IO[context.Context] {
// logger
logger := cb(ctx)
return func() context.Context {
// check if the logger is enabled
if logger.Enabled(ctx, logLevel) {
// Generate unique logging ID and capture start time
contextID := LoggingID(loggingCounter.Add(1))
startTime := time.Now()
newLogger := logger.With("ID", contextID)
// log using ID
newLogger.LogAttrs(ctx, logLevel, "[entering]", nameAttr)
withCtx := withLoggingContext(loggingContext{
contextID: contextID,
startTime: startTime,
logger: newLogger,
isEnabled: true,
})
withLogger := logging.WithLogger(newLogger)
return withCtx(withLogger(ctx))
}
// logging disabled
withCtx := withLoggingContext(loggingContext{
logger: logger,
isEnabled: false,
})
return withCtx(ctx)
}
}
}
// onExitAny creates a Kleisli function that handles exit logging for an operation.
// It logs either success or error based on the Result, including the operation duration.
// Only logs if logging was enabled during entry (checked via loggingContext.isEnabled).
//
// Parameters:
// - logLevel: The slog.Level to use for logging
// - nameAttr: The slog.Attr containing the operation name
//
// Returns:
// - A Kleisli function that logs the exit/error and returns nil
func onExitAny(
logLevel slog.Level,
nameAttr slog.Attr,
) readerio.Kleisli[Result[any], any] {
return func(res Result[any]) ReaderIO[any] {
return func(ctx context.Context) IO[any] {
value := getLoggingContext(ctx)
if value.isEnabled {
return func() any {
// Retrieve logging information from context
durationAttr := slog.Duration("duration", time.Since(value.startTime))
// Log error with ID and duration
onError := func(err error) any {
value.logger.LogAttrs(ctx, logLevel, "[throwing]",
nameAttr,
durationAttr,
slog.Any("error", err))
return nil
}
// Log success with ID and duration
onSuccess := func(_ any) any {
value.logger.LogAttrs(ctx, logLevel, "[exiting ]", nameAttr, durationAttr)
return nil
}
return F.Pipe1(
res,
result.Fold(onError, onSuccess),
)
}
}
// nothing to do
return io.Of[any](nil)
}
}
}
// LogEntryExitWithCallback creates an operator that logs entry and exit of a ReaderIOResult computation
// using a custom logger callback and log level. This provides more control than LogEntryExit.
//
// This function allows you to:
// - Use a custom log level (Debug, Info, Warn, Error)
// - Retrieve the logger from the context using a custom callback
// - Control whether logging is enabled based on the logger's configuration
//
// Type Parameters:
// - A: The success type of the ReaderIOResult
//
// Parameters:
// - logLevel: The slog.Level to use for all log messages (entry, exit, error)
// - cb: Callback function to retrieve the *slog.Logger from the context
// - name: A descriptive name for the operation
//
// Returns:
// - An Operator that wraps the ReaderIOResult with customizable logging
//
// Example with custom log level:
//
// // Log at debug level
// debugOp := LogEntryExitWithCallback[User](
// slog.LevelDebug,
// logging.GetLoggerFromContext,
// "fetchUser",
// )
// result := debugOp(fetchUser(123))
//
// Example with custom logger callback:
//
// type loggerKey int
// const myLoggerKey loggerKey = 0
//
// getMyLogger := func(ctx context.Context) *slog.Logger {
// if logger := ctx.Value(myLoggerKey); logger != nil {
// return logger.(*slog.Logger)
// }
// return slog.Default()
// }
//
// customOp := LogEntryExitWithCallback[Data](
// slog.LevelInfo,
// getMyLogger,
// "processData",
// )
func LogEntryExitWithCallback[A any](
logLevel slog.Level,
cb func(context.Context) *slog.Logger,
name string) Operator[A, A] {
nameAttr := slog.String("name", name)
return LogEntryExitF(
onEntry(logLevel, cb, nameAttr),
F.Flow2(
result.MapTo[A, any](nil),
onExitAny(logLevel, nameAttr),
),
)
}
// LogEntryExit creates an operator that logs the entry and exit of a ReaderIOResult computation with timing and correlation IDs.
//
// This function wraps a ReaderIOResult computation with automatic logging that tracks:
// - Entry: Logs when the computation starts with "[entering <id>] <name>"
// - Exit: Logs when the computation completes successfully with "[exiting <id>] <name> [duration]"
// - Error: Logs when the computation fails with "[throwing <id>] <name> [duration]: <error>"
//
// Each logged operation is assigned a unique LoggingID (a monotonically increasing counter) that
// appears in all log messages for that operation. This ID enables correlation of entry and exit
// logs, even when multiple operations are running concurrently or are interleaved.
//
// The logging information (start time and ID) is stored in the context and can be retrieved using
// getLoggingContext or getLoggingID. This allows nested operations to access the parent operation's
// logging information.
//
// Type Parameters:
// - A: The success type of the ReaderIOResult
//
// Parameters:
// - name: A descriptive name for the computation, used in log messages to identify the operation
//
// Returns:
// - An Operator that wraps the ReaderIOResult computation with entry/exit logging
//
// The function uses the bracket pattern to ensure that:
// - Entry is logged before the computation starts
// - A unique LoggingID is assigned and stored in the context
// - Exit/error is logged after the computation completes, regardless of success or failure
// - Timing is accurate, measuring from entry to exit
// - The original result is preserved and returned unchanged
//
// Log Format:
// - Entry: "[entering <id>] <name>"
// - Success: "[exiting <id>] <name> [<duration>s]"
// - Error: "[throwing <id>] <name> [<duration>s]: <error>"
//
// Example with successful computation:
//
// fetchUser := func(id int) ReaderIOResult[User] {
// return Of(User{ID: id, Name: "Alice"})
// }
//
// // Wrap with logging
// loggedFetch := LogEntryExit[User]("fetchUser")(fetchUser(123))
//
// // Execute
// result := loggedFetch(context.Background())()
// // Logs:
// // [entering 1] fetchUser
// // [exiting 1] fetchUser [0.1s]
//
// Example with error:
//
// failingOp := func() ReaderIOResult[string] {
// return Left[string](errors.New("connection timeout"))
// }
//
// logged := LogEntryExit[string]("failingOp")(failingOp())
// result := logged(context.Background())()
// // Logs:
// // [entering 2] failingOp
// // [throwing 2] failingOp [0.0s]: connection timeout
//
// Example with nested operations:
//
// fetchOrders := func(userID int) ReaderIOResult[[]Order] {
// return Of([]Order{{ID: 1}})
// }
//
// pipeline := F.Pipe3(
// fetchUser(123),
// LogEntryExit[User]("fetchUser"),
// Chain(func(user User) ReaderIOResult[[]Order] {
// return fetchOrders(user.ID)
// }),
// LogEntryExit[[]Order]("fetchOrders"),
// )
//
// result := pipeline(context.Background())()
// // Logs:
// // [entering 3] fetchUser
// // [exiting 3] fetchUser [0.1s]
// // [entering 4] fetchOrders
// // [exiting 4] fetchOrders [0.2s]
//
// Example with concurrent operations:
//
// // Multiple operations can run concurrently, each with unique IDs
// op1 := LogEntryExit[Data]("operation1")(fetchData(1))
// op2 := LogEntryExit[Data]("operation2")(fetchData(2))
//
// go op1(context.Background())()
// go op2(context.Background())()
// // Logs (order may vary):
// // [entering 5] operation1
// // [entering 6] operation2
// // [exiting 5] operation1 [0.1s]
// // [exiting 6] operation2 [0.2s]
// // The IDs allow correlation even when logs are interleaved
//
// Use Cases:
// - Debugging: Track execution flow through complex ReaderIOResult chains with correlation IDs
// - Performance monitoring: Identify slow operations with timing information
// - Production logging: Monitor critical operations with unique identifiers
// - Concurrent operations: Correlate logs from multiple concurrent operations
// - Nested operations: Track parent-child relationships in operation hierarchies
// - Troubleshooting: Quickly identify where errors occur and correlate with entry logs
//
//go:inline
func LogEntryExit[A any](name string) Operator[A, A] {
return LogEntryExitWithCallback[A](slog.LevelInfo, logging.GetLoggerFromContext, name)
}
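// curriedLog returns a curried logging helper: given a slog.Attr it yields a function that,
// for a context.Context, produces an IO action which logs the attribute together with the
// given message at the given log level, using the logger obtained from the context via cb.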
func curriedLog(
logLevel slog.Level,
cb func(context.Context) *slog.Logger,
message string) func(slog.Attr) func(context.Context) func() struct{} {
return F.Curry2(func(a slog.Attr, ctx context.Context) func() struct{} {
logger := cb(ctx)
return func() struct{} {
logger.LogAttrs(ctx, logLevel, message, a)
return struct{}{}
}
})
}
// SLogWithCallback creates a Kleisli arrow that logs a Result value (success or error) with a custom logger and log level.
//
// This function logs both successful values and errors, making it useful for debugging and monitoring
// Result values as they flow through a computation. Unlike TapSLog which only logs successful values,
// SLogWithCallback logs the Result regardless of whether it contains a value or an error.
//
// The logged output includes:
// - For success: The message with the value as a structured "value" attribute
// - For error: The message with the error as a structured "error" attribute
//
// The Result is passed through unchanged after logging.
//
// Type Parameters:
// - A: The success type of the Result
//
// Parameters:
// - logLevel: The slog.Level to use for logging (e.g., slog.LevelInfo, slog.LevelDebug)
// - cb: Callback function to retrieve the *slog.Logger from the context
// - message: A descriptive message to include in the log entry
//
// Returns:
// - A Kleisli arrow that logs the Result (value or error) and returns it unchanged
//
// Example with custom log level:
//
// debugLog := SLogWithCallback[User](
// slog.LevelDebug,
// logging.GetLoggerFromContext,
// "User result",
// )
//
// pipeline := F.Pipe2(
// fetchUser(123),
// Chain(debugLog),
// Map(func(u User) string { return u.Name }),
// )
//
// Example with custom logger:
//
// type loggerKey int
// const myLoggerKey loggerKey = 0
//
// getMyLogger := func(ctx context.Context) *slog.Logger {
// if logger := ctx.Value(myLoggerKey); logger != nil {
// return logger.(*slog.Logger)
// }
// return slog.Default()
// }
//
// customLog := SLogWithCallback[Data](
// slog.LevelWarn,
// getMyLogger,
// "Data processing result",
// )
//
// Use Cases:
// - Debugging: Log both successful and failed Results in a pipeline
// - Error tracking: Monitor error occurrences with custom log levels
// - Custom logging: Use application-specific loggers and log levels
// - Conditional logging: Enable/disable logging based on logger configuration
func SLogWithCallback[A any](
logLevel slog.Level,
cb func(context.Context) *slog.Logger,
message string) Kleisli[Result[A], A] {
return F.Pipe1(
F.Flow2(
// create the attribute to log depending on the condition
result.ToSLogAttr[A](),
// create an `IO` that logs the attribute
curriedLog(logLevel, cb, message),
),
// map the logging effect back to the original Result so it is passed through unchanged
reader.Chain(reader.Sequence(readerio.MapTo[struct{}, Result[A]])),
)
}
// SLog creates a Kleisli arrow that logs a Result value (success or error) with a message.
//
// This function logs both successful values and errors at Info level using the logger from the context.
// It's a convenience wrapper around SLogWithCallback with standard settings.
//
// The logged output includes:
// - For success: The message with the value as a structured "value" attribute
// - For error: The message with the error as a structured "error" attribute
//
// The Result is passed through unchanged after logging, making this function transparent in the
// computation pipeline.
//
// Type Parameters:
// - A: The success type of the Result
//
// Parameters:
// - message: A descriptive message to include in the log entry
//
// Returns:
// - A Kleisli arrow that logs the Result (value or error) and returns it unchanged
//
// Example with successful Result:
//
// pipeline := F.Pipe2(
// fetchUser(123),
// Chain(SLog[User]("Fetched user")),
// Map(func(u User) string { return u.Name }),
// )
//
// result := pipeline(context.Background())()
// // If successful, logs: "Fetched user" value={ID:123 Name:"Alice"}
// // If error, logs: "Fetched user" error="user not found"
//
// Example in error handling pipeline:
//
// pipeline := F.Pipe3(
// fetchData(id),
// Chain(SLog[Data]("Data fetched")),
// Chain(validateData),
// Chain(SLog[Data]("Data validated")),
// Chain(processData),
// )
//
// // Logs each step, including errors:
// // "Data fetched" value={...} or error="..."
// // "Data validated" value={...} or error="..."
//
// Use Cases:
// - Debugging: Track both successful and failed Results in a pipeline
// - Error monitoring: Log errors as they occur in the computation
// - Flow tracking: See the progression of Results through a pipeline
// - Troubleshooting: Identify where errors are introduced or propagated
//
// Note: This function logs the Result itself (which may contain an error), not just successful values.
// TapSLog applies the same logging at the Operator level and therefore also logs both successes and errors.
//
//go:inline
func SLog[A any](message string) Kleisli[Result[A], A] {
return SLogWithCallback[A](slog.LevelInfo, logging.GetLoggerFromContext, message)
}
// TapSLog creates an operator that logs the Result flowing through a computation and passes it through unchanged.
//
// This function is useful for debugging and monitoring values as they flow through a ReaderIOResult
// computation chain. It applies SLog to the Result produced by the computation, so successful values
// and errors are both logged; in either case the Result is propagated unchanged, making the operator
// transparent in the pipeline.
//
// The logged output includes:
// - The provided message
// - The value (as a structured "value" attribute) on success, or the error (as a structured "error" attribute) on failure
//
// Type Parameters:
// - A: The type of the value to log and pass through
//
// Parameters:
// - message: A descriptive message to include in the log entry
//
// Returns:
// - An Operator that logs the Result (value or error) and passes the computation through unchanged
//
// Example with simple value logging:
//
// fetchUser := func(id int) ReaderIOResult[User] {
// return Of(User{ID: id, Name: "Alice"})
// }
//
// pipeline := F.Pipe2(
// fetchUser(123),
// TapSLog[User]("Fetched user"),
// Map(func(u User) string { return u.Name }),
// )
//
// result := pipeline(context.Background())()
// // Logs: "Fetched user" value={ID:123 Name:"Alice"}
// // Returns: result.Of("Alice")
//
// Example in a processing pipeline:
//
// processOrder := F.Pipe4(
// fetchOrder(orderId),
// TapSLog[Order]("Order fetched"),
// Chain(validateOrder),
// TapSLog[Order]("Order validated"),
// Chain(processPayment),
// TapSLog[Payment]("Payment processed"),
// )
//
// result := processOrder(context.Background())()
// // Logs each step with its intermediate Result
// // If a step fails, the following TapSLog calls log the propagated error instead of a value
//
// Example with error handling:
//
// pipeline := F.Pipe3(
// fetchData(id),
// TapSLog[Data]("Data fetched"),
// Chain(func(d Data) ReaderIOResult[Report] {
// if d.IsValid() {
// return Of(processData(d))
// }
// return Left[Report](errors.New("invalid data"))
// }),
// TapSLog[Report]("Data processed"),
// )
//
// // If fetchData succeeds: logs "Data fetched" with the data
// // If processing succeeds: logs "Data processed" with the report
// // If processing fails: logs "Data processed" with the error (which still propagates)
//
// Use Cases:
// - Debugging: Inspect intermediate Results in a computation pipeline
// - Monitoring: Track data flow through complex operations
// - Troubleshooting: Identify the step at which a pipeline starts reporting errors
// - Auditing: Log important values for compliance or security
// - Development: Understand data transformations during development
//
// Note: This function logs the Result of the computation, so errors are logged as well as successful
// values, and the Result is always propagated unchanged. Use SLog directly when you already hold a Result.
//
//go:inline
func TapSLog[A any](message string) Operator[A, A] {
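// readerio.ChainFirst feeds the full Result (success or error) into SLog and discards the
// logging effect's output, so the original computation is returned unchanged.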
return readerio.ChainFirst(SLog[A](message))
}


@@ -0,0 +1,662 @@
package readerioresult
import (
"bytes"
"context"
"errors"
"log/slog"
"strconv"
"strings"
"testing"
"time"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/logging"
N "github.com/IBM/fp-go/v2/number"
"github.com/IBM/fp-go/v2/result"
S "github.com/IBM/fp-go/v2/string"
"github.com/stretchr/testify/assert"
)
// TestLoggingContext tests basic nested logging with correlation IDs
func TestLoggingContext(t *testing.T) {
data := F.Pipe2(
Of("Sample"),
LogEntryExit[string]("TestLoggingContext1"),
LogEntryExit[string]("TestLoggingContext2"),
)
assert.Equal(t, result.Of("Sample"), data(context.Background())())
}
// TestLogEntryExitSuccess tests successful operation logging
func TestLogEntryExitSuccess(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
operation := F.Pipe1(
Of("success value"),
LogEntryExit[string]("TestOperation"),
)
res := operation(context.Background())()
assert.Equal(t, result.Of("success value"), res)
logOutput := buf.String()
assert.Contains(t, logOutput, "[entering]")
assert.Contains(t, logOutput, "[exiting ]")
assert.Contains(t, logOutput, "TestOperation")
assert.Contains(t, logOutput, "ID=")
assert.Contains(t, logOutput, "duration=")
}
// TestLogEntryExitError tests error operation logging
func TestLogEntryExitError(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
testErr := errors.New("test error")
operation := F.Pipe1(
Left[string](testErr),
LogEntryExit[string]("FailingOperation"),
)
res := operation(context.Background())()
assert.True(t, result.IsLeft(res))
logOutput := buf.String()
assert.Contains(t, logOutput, "[entering]")
assert.Contains(t, logOutput, "[throwing]")
assert.Contains(t, logOutput, "FailingOperation")
assert.Contains(t, logOutput, "test error")
assert.Contains(t, logOutput, "ID=")
assert.Contains(t, logOutput, "duration=")
}
// TestLogEntryExitNested tests nested operations with different IDs
func TestLogEntryExitNested(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
innerOp := F.Pipe1(
Of("inner"),
LogEntryExit[string]("InnerOp"),
)
outerOp := F.Pipe2(
Of("outer"),
LogEntryExit[string]("OuterOp"),
Chain(func(s string) ReaderIOResult[string] {
return innerOp
}),
)
res := outerOp(context.Background())()
assert.True(t, result.IsRight(res))
logOutput := buf.String()
// Should have two different IDs
assert.Contains(t, logOutput, "OuterOp")
assert.Contains(t, logOutput, "InnerOp")
// Count entering and exiting logs
enterCount := strings.Count(logOutput, "[entering]")
exitCount := strings.Count(logOutput, "[exiting ]")
assert.Equal(t, 2, enterCount, "Should have 2 entering logs")
assert.Equal(t, 2, exitCount, "Should have 2 exiting logs")
}
// TestLogEntryExitWithCallback tests custom log level and callback
func TestLogEntryExitWithCallback(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelDebug,
}))
customCallback := func(ctx context.Context) *slog.Logger {
return logger
}
operation := F.Pipe1(
Of(42),
LogEntryExitWithCallback[int](slog.LevelDebug, customCallback, "DebugOperation"),
)
res := operation(context.Background())()
assert.Equal(t, result.Of(42), res)
logOutput := buf.String()
assert.Contains(t, logOutput, "[entering]")
assert.Contains(t, logOutput, "[exiting ]")
assert.Contains(t, logOutput, "DebugOperation")
assert.Contains(t, logOutput, "level=DEBUG")
}
// TestLogEntryExitDisabled tests that logging can be disabled
func TestLogEntryExitDisabled(t *testing.T) {
var buf bytes.Buffer
// Create logger with level that disables info logs
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelError, // Only log errors
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
operation := F.Pipe1(
Of("value"),
LogEntryExit[string]("DisabledOperation"),
)
res := operation(context.Background())()
assert.True(t, result.IsRight(res))
// Should have no logs since level is ERROR
logOutput := buf.String()
assert.Empty(t, logOutput, "Should have no logs when logging is disabled")
}
// TestLogEntryExitF tests custom entry/exit callbacks
func TestLogEntryExitF(t *testing.T) {
var entryCount, exitCount int
onEntry := func(ctx context.Context) IO[context.Context] {
return func() context.Context {
entryCount++
return ctx
}
}
onExit := func(res Result[string]) ReaderIO[any] {
return func(ctx context.Context) IO[any] {
return func() any {
exitCount++
return nil
}
}
}
operation := F.Pipe1(
Of("test"),
LogEntryExitF(onEntry, onExit),
)
res := operation(context.Background())()
assert.True(t, result.IsRight(res))
assert.Equal(t, 1, entryCount, "Entry callback should be called once")
assert.Equal(t, 1, exitCount, "Exit callback should be called once")
}
// TestLogEntryExitFWithError tests custom callbacks with error
func TestLogEntryExitFWithError(t *testing.T) {
var entryCount, exitCount int
var capturedError error
onEntry := func(ctx context.Context) IO[context.Context] {
return func() context.Context {
entryCount++
return ctx
}
}
onExit := func(res Result[string]) ReaderIO[any] {
return func(ctx context.Context) IO[any] {
return func() any {
exitCount++
if result.IsLeft(res) {
_, capturedError = result.Unwrap(res)
}
return nil
}
}
}
testErr := errors.New("custom error")
operation := F.Pipe1(
Left[string](testErr),
LogEntryExitF(onEntry, onExit),
)
res := operation(context.Background())()
assert.True(t, result.IsLeft(res))
assert.Equal(t, 1, entryCount, "Entry callback should be called once")
assert.Equal(t, 1, exitCount, "Exit callback should be called once")
assert.Equal(t, testErr, capturedError, "Should capture the error")
}
// TestLoggingIDUniqueness tests that logging IDs are unique
func TestLoggingIDUniqueness(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
// Run multiple operations
for i := range 5 {
op := F.Pipe1(
Of(i),
LogEntryExit[int]("Operation"),
)
op(context.Background())()
}
logOutput := buf.String()
// Extract all IDs and verify they're unique
lines := strings.Split(logOutput, "\n")
ids := make(map[string]bool)
for _, line := range lines {
if strings.Contains(line, "ID=") {
// Extract ID value
parts := strings.Split(line, "ID=")
if len(parts) > 1 {
idPart := strings.Fields(parts[1])[0]
ids[idPart] = true
}
}
}
// Should have 5 unique IDs (one per operation)
assert.GreaterOrEqual(t, len(ids), 5, "Should have at least 5 unique IDs")
}
// TestLogEntryExitWithContextLogger tests using logger from context
func TestLogEntryExitWithContextLogger(t *testing.T) {
var buf bytes.Buffer
contextLogger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
ctx := logging.WithLogger(contextLogger)(context.Background())
operation := F.Pipe1(
Of("context value"),
LogEntryExit[string]("ContextOperation"),
)
res := operation(ctx)()
assert.True(t, result.IsRight(res))
logOutput := buf.String()
assert.Contains(t, logOutput, "[entering]")
assert.Contains(t, logOutput, "[exiting ]")
assert.Contains(t, logOutput, "ContextOperation")
}
// TestLogEntryExitTiming tests that duration is captured
func TestLogEntryExitTiming(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
// Operation with delay
slowOp := func(ctx context.Context) IOResult[string] {
return func() Result[string] {
time.Sleep(10 * time.Millisecond)
return result.Of("done")
}
}
operation := F.Pipe1(
slowOp,
LogEntryExit[string]("SlowOperation"),
)
res := operation(context.Background())()
assert.True(t, result.IsRight(res))
logOutput := buf.String()
assert.Contains(t, logOutput, "duration=")
// Verify duration is present in exit log
lines := strings.Split(logOutput, "\n")
var foundDuration bool
for _, line := range lines {
if strings.Contains(line, "[exiting ]") && strings.Contains(line, "duration=") {
foundDuration = true
break
}
}
assert.True(t, foundDuration, "Exit log should contain duration")
}
// TestLogEntryExitChainedOperations tests complex chained operations
func TestLogEntryExitChainedOperations(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
step1 := F.Pipe1(
Of(1),
LogEntryExit[int]("Step1"),
)
step2 := F.Flow3(
N.Mul(2),
Of,
LogEntryExit[int]("Step2"),
)
step3 := F.Flow3(
strconv.Itoa,
Of,
LogEntryExit[string]("Step3"),
)
pipeline := F.Pipe1(
step1,
Chain(F.Flow2(
step2,
Chain(step3),
)),
)
res := pipeline(context.Background())()
assert.Equal(t, result.Of("2"), res)
logOutput := buf.String()
assert.Contains(t, logOutput, "Step1")
assert.Contains(t, logOutput, "Step2")
assert.Contains(t, logOutput, "Step3")
// Verify all steps completed
assert.Equal(t, 3, strings.Count(logOutput, "[entering]"))
assert.Equal(t, 3, strings.Count(logOutput, "[exiting ]"))
}
// TestTapSLog tests basic TapSLog functionality
func TestTapSLog(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
operation := F.Pipe2(
Of(42),
TapSLog[int]("Processing value"),
Map(N.Mul(2)),
)
res := operation(context.Background())()
assert.Equal(t, result.Of(84), res)
logOutput := buf.String()
assert.Contains(t, logOutput, "Processing value")
assert.Contains(t, logOutput, "value=42")
}
// TestTapSLogInPipeline tests TapSLog in a multi-step pipeline
func TestTapSLogInPipeline(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
step1 := F.Pipe2(
Of("hello"),
TapSLog[string]("Step 1: Initial value"),
Map(func(s string) string { return s + " world" }),
)
step2 := F.Pipe2(
step1,
TapSLog[string]("Step 2: After concatenation"),
Map(S.Size),
)
pipeline := F.Pipe1(
step2,
TapSLog[int]("Step 3: Final length"),
)
res := pipeline(context.Background())()
assert.Equal(t, result.Of(11), res)
logOutput := buf.String()
assert.Contains(t, logOutput, "Step 1: Initial value")
assert.Contains(t, logOutput, "value=hello")
assert.Contains(t, logOutput, "Step 2: After concatenation")
assert.Contains(t, logOutput, `value="hello world"`)
assert.Contains(t, logOutput, "Step 3: Final length")
assert.Contains(t, logOutput, "value=11")
}
// TestTapSLogWithError tests that TapSLog logs errors (via SLog)
func TestTapSLogWithError(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
testErr := errors.New("computation failed")
pipeline := F.Pipe2(
Left[int](testErr),
TapSLog[int]("Error logged"),
Map(N.Mul(2)),
)
res := pipeline(context.Background())()
assert.True(t, result.IsLeft(res))
logOutput := buf.String()
// TapSLog uses SLog internally, which logs both successes and errors
assert.Contains(t, logOutput, "Error logged")
assert.Contains(t, logOutput, "error")
assert.Contains(t, logOutput, "computation failed")
}
// TestTapSLogWithStruct tests TapSLog with structured data
func TestTapSLogWithStruct(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
type User struct {
ID int
Name string
}
user := User{ID: 123, Name: "Alice"}
operation := F.Pipe2(
Of(user),
TapSLog[User]("User data"),
Map(func(u User) string { return u.Name }),
)
res := operation(context.Background())()
assert.Equal(t, result.Of("Alice"), res)
logOutput := buf.String()
assert.Contains(t, logOutput, "User data")
assert.Contains(t, logOutput, "ID:123")
assert.Contains(t, logOutput, "Name:Alice")
}
// TestTapSLogDisabled tests that TapSLog respects logger level
func TestTapSLogDisabled(t *testing.T) {
var buf bytes.Buffer
// Create logger with level that disables info logs
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelError, // Only log errors
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
operation := F.Pipe2(
Of(42),
TapSLog[int]("This should not be logged"),
Map(N.Mul(2)),
)
res := operation(context.Background())()
assert.Equal(t, result.Of(84), res)
// Should have no logs since level is ERROR
logOutput := buf.String()
assert.Empty(t, logOutput, "Should have no logs when logging is disabled")
}
// TestTapSLogWithContextLogger tests TapSLog using logger from context
func TestTapSLogWithContextLogger(t *testing.T) {
var buf bytes.Buffer
contextLogger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
ctx := logging.WithLogger(contextLogger)(context.Background())
operation := F.Pipe2(
Of("test value"),
TapSLog[string]("Context logger test"),
Map(S.Size),
)
res := operation(ctx)()
assert.Equal(t, result.Of(10), res)
logOutput := buf.String()
assert.Contains(t, logOutput, "Context logger test")
assert.Contains(t, logOutput, `value="test value"`)
}
// TestSLogLogsSuccessValue tests that SLog logs successful Result values
func TestSLogLogsSuccessValue(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
ctx := context.Background()
// Create a Result and log it
res1 := result.Of(42)
logged := SLog[int]("Result value")(res1)(ctx)()
assert.Equal(t, result.Of(42), logged)
logOutput := buf.String()
assert.Contains(t, logOutput, "Result value")
assert.Contains(t, logOutput, "value=42")
}
// TestSLogLogsErrorValue tests that SLog logs error Result values
func TestSLogLogsErrorValue(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
ctx := context.Background()
testErr := errors.New("test error")
// Create an error Result and log it
res1 := result.Left[int](testErr)
logged := SLog[int]("Result value")(res1)(ctx)()
assert.True(t, result.IsLeft(logged))
logOutput := buf.String()
assert.Contains(t, logOutput, "Result value")
assert.Contains(t, logOutput, "error")
assert.Contains(t, logOutput, "test error")
}
// TestSLogWithCallbackCustomLevel tests SLogWithCallback with custom log level
func TestSLogWithCallbackCustomLevel(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelDebug,
}))
customCallback := func(ctx context.Context) *slog.Logger {
return logger
}
ctx := context.Background()
// Create a Result and log it with custom callback
res1 := result.Of(42)
logged := SLogWithCallback[int](slog.LevelDebug, customCallback, "Debug result")(res1)(ctx)()
assert.Equal(t, result.Of(42), logged)
logOutput := buf.String()
assert.Contains(t, logOutput, "Debug result")
assert.Contains(t, logOutput, "value=42")
assert.Contains(t, logOutput, "level=DEBUG")
}
// TestSLogWithCallbackLogsError tests SLogWithCallback logs errors
func TestSLogWithCallbackLogsError(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelWarn,
}))
customCallback := func(ctx context.Context) *slog.Logger {
return logger
}
ctx := context.Background()
testErr := errors.New("warning error")
// Create an error Result and log it with custom callback
res1 := result.Left[int](testErr)
logged := SLogWithCallback[int](slog.LevelWarn, customCallback, "Warning result")(res1)(ctx)()
assert.True(t, result.IsLeft(logged))
logOutput := buf.String()
assert.Contains(t, logOutput, "Warning result")
assert.Contains(t, logOutput, "error")
assert.Contains(t, logOutput, "warning error")
assert.Contains(t, logOutput, "level=WARN")
}


@@ -19,6 +19,7 @@ import (
"context"
"time"
"github.com/IBM/fp-go/v2/context/readerio"
"github.com/IBM/fp-go/v2/context/readerresult"
"github.com/IBM/fp-go/v2/either"
"github.com/IBM/fp-go/v2/errors"
@@ -26,10 +27,11 @@ import (
"github.com/IBM/fp-go/v2/io"
"github.com/IBM/fp-go/v2/ioeither"
"github.com/IBM/fp-go/v2/ioresult"
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/reader"
"github.com/IBM/fp-go/v2/readerio"
RIOR "github.com/IBM/fp-go/v2/readerioresult"
"github.com/IBM/fp-go/v2/readeroption"
"github.com/IBM/fp-go/v2/result"
)
const (
@@ -150,7 +152,7 @@ func MapTo[A, B any](b B) Operator[A, B] {
//
//go:inline
func MonadChain[A, B any](ma ReaderIOResult[A], f Kleisli[A, B]) ReaderIOResult[B] {
return RIOR.MonadChain(ma, f)
return RIOR.MonadChain(ma, WithContextK(f))
}
// Chain sequences two [ReaderIOResult] computations, where the second depends on the result of the first.
@@ -163,7 +165,7 @@ func MonadChain[A, B any](ma ReaderIOResult[A], f Kleisli[A, B]) ReaderIOResult[
//
//go:inline
func Chain[A, B any](f Kleisli[A, B]) Operator[A, B] {
return RIOR.Chain(f)
return RIOR.Chain(WithContextK(f))
}
// MonadChainFirst sequences two [ReaderIOResult] computations but returns the result of the first.
@@ -177,12 +179,12 @@ func Chain[A, B any](f Kleisli[A, B]) Operator[A, B] {
//
//go:inline
func MonadChainFirst[A, B any](ma ReaderIOResult[A], f Kleisli[A, B]) ReaderIOResult[A] {
return RIOR.MonadChainFirst(ma, f)
return RIOR.MonadChainFirst(ma, WithContextK(f))
}
//go:inline
func MonadTap[A, B any](ma ReaderIOResult[A], f Kleisli[A, B]) ReaderIOResult[A] {
return RIOR.MonadTap(ma, f)
return RIOR.MonadTap(ma, WithContextK(f))
}
// ChainFirst sequences two [ReaderIOResult] computations but returns the result of the first.
@@ -195,12 +197,12 @@ func MonadTap[A, B any](ma ReaderIOResult[A], f Kleisli[A, B]) ReaderIOResult[A]
//
//go:inline
func ChainFirst[A, B any](f Kleisli[A, B]) Operator[A, A] {
return RIOR.ChainFirst(f)
return RIOR.ChainFirst(WithContextK(f))
}
//go:inline
func Tap[A, B any](f Kleisli[A, B]) Operator[A, A] {
return RIOR.Tap(f)
return RIOR.Tap(WithContextK(f))
}
// Of creates a [ReaderIOResult] that always succeeds with the given value.
@@ -243,14 +245,14 @@ func MonadApPar[B, A any](fab ReaderIOResult[func(A) B], fa ReaderIOResult[A]) R
return func(ctx context.Context) IOResult[B] {
// quick check for cancellation
if err := context.Cause(ctx); err != nil {
return ioeither.Left[B](err)
if ctx.Err() != nil {
return ioeither.Left[B](context.Cause(ctx))
}
return func() Result[B] {
// quick check for cancellation
if err := context.Cause(ctx); err != nil {
return either.Left[B](err)
if ctx.Err() != nil {
return either.Left[B](context.Cause(ctx))
}
// create sub-contexts for fa and fab, so they can cancel one other
@@ -382,7 +384,7 @@ func Ask() ReaderIOResult[context.Context] {
// Returns a new ReaderIOResult with the chained computation.
//
//go:inline
func MonadChainEitherK[A, B any](ma ReaderIOResult[A], f func(A) Either[B]) ReaderIOResult[B] {
func MonadChainEitherK[A, B any](ma ReaderIOResult[A], f either.Kleisli[error, A, B]) ReaderIOResult[B] {
return RIOR.MonadChainEitherK(ma, f)
}
@@ -395,7 +397,12 @@ func MonadChainEitherK[A, B any](ma ReaderIOResult[A], f func(A) Either[B]) Read
// Returns a function that chains the Either-returning function.
//
//go:inline
func ChainEitherK[A, B any](f func(A) Either[B]) Operator[A, B] {
func ChainEitherK[A, B any](f either.Kleisli[error, A, B]) Operator[A, B] {
return RIOR.ChainEitherK[context.Context](f)
}
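// ChainResultK chains a Result-returning function into a [ReaderIOResult] computation.
// Since [Result] is an Either specialised to error, this behaves exactly like [ChainEitherK].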
//go:inline
func ChainResultK[A, B any](f either.Kleisli[error, A, B]) Operator[A, B] {
return RIOR.ChainEitherK[context.Context](f)
}
@@ -409,12 +416,12 @@ func ChainEitherK[A, B any](f func(A) Either[B]) Operator[A, B] {
// Returns a ReaderIOResult with the original value if both computations succeed.
//
//go:inline
func MonadChainFirstEitherK[A, B any](ma ReaderIOResult[A], f func(A) Either[B]) ReaderIOResult[A] {
func MonadChainFirstEitherK[A, B any](ma ReaderIOResult[A], f either.Kleisli[error, A, B]) ReaderIOResult[A] {
return RIOR.MonadChainFirstEitherK(ma, f)
}
//go:inline
func MonadTapEitherK[A, B any](ma ReaderIOResult[A], f func(A) Either[B]) ReaderIOResult[A] {
func MonadTapEitherK[A, B any](ma ReaderIOResult[A], f either.Kleisli[error, A, B]) ReaderIOResult[A] {
return RIOR.MonadTapEitherK(ma, f)
}
@@ -427,12 +434,12 @@ func MonadTapEitherK[A, B any](ma ReaderIOResult[A], f func(A) Either[B]) Reader
// Returns a function that chains the Either-returning function.
//
//go:inline
func ChainFirstEitherK[A, B any](f func(A) Either[B]) Operator[A, A] {
func ChainFirstEitherK[A, B any](f either.Kleisli[error, A, B]) Operator[A, A] {
return RIOR.ChainFirstEitherK[context.Context](f)
}
//go:inline
func TapEitherK[A, B any](f func(A) Either[B]) Operator[A, A] {
func TapEitherK[A, B any](f either.Kleisli[error, A, B]) Operator[A, A] {
return RIOR.TapEitherK[context.Context](f)
}
@@ -445,7 +452,7 @@ func TapEitherK[A, B any](f func(A) Either[B]) Operator[A, A] {
// Returns a function that chains Option-returning functions into ReaderIOResult.
//
//go:inline
func ChainOptionK[A, B any](onNone func() error) func(func(A) Option[B]) Operator[A, B] {
func ChainOptionK[A, B any](onNone func() error) func(option.Kleisli[A, B]) Operator[A, B] {
return RIOR.ChainOptionK[context.Context, A, B](onNone)
}
@@ -527,7 +534,7 @@ func Never[A any]() ReaderIOResult[A] {
// Returns a new ReaderIOResult with the chained IO computation.
//
//go:inline
func MonadChainIOK[A, B any](ma ReaderIOResult[A], f func(A) IO[B]) ReaderIOResult[B] {
func MonadChainIOK[A, B any](ma ReaderIOResult[A], f io.Kleisli[A, B]) ReaderIOResult[B] {
return RIOR.MonadChainIOK(ma, f)
}
@@ -540,7 +547,7 @@ func MonadChainIOK[A, B any](ma ReaderIOResult[A], f func(A) IO[B]) ReaderIOResu
// Returns a function that chains the IO-returning function.
//
//go:inline
func ChainIOK[A, B any](f func(A) IO[B]) Operator[A, B] {
func ChainIOK[A, B any](f io.Kleisli[A, B]) Operator[A, B] {
return RIOR.ChainIOK[context.Context](f)
}
@@ -554,12 +561,12 @@ func ChainIOK[A, B any](f func(A) IO[B]) Operator[A, B] {
// Returns a ReaderIOResult with the original value after executing the IO.
//
//go:inline
func MonadChainFirstIOK[A, B any](ma ReaderIOResult[A], f func(A) IO[B]) ReaderIOResult[A] {
func MonadChainFirstIOK[A, B any](ma ReaderIOResult[A], f io.Kleisli[A, B]) ReaderIOResult[A] {
return RIOR.MonadChainFirstIOK(ma, f)
}
//go:inline
func MonadTapIOK[A, B any](ma ReaderIOResult[A], f func(A) IO[B]) ReaderIOResult[A] {
func MonadTapIOK[A, B any](ma ReaderIOResult[A], f io.Kleisli[A, B]) ReaderIOResult[A] {
return RIOR.MonadTapIOK(ma, f)
}
@@ -572,12 +579,12 @@ func MonadTapIOK[A, B any](ma ReaderIOResult[A], f func(A) IO[B]) ReaderIOResult
// Returns a function that chains the IO-returning function.
//
//go:inline
func ChainFirstIOK[A, B any](f func(A) IO[B]) Operator[A, A] {
func ChainFirstIOK[A, B any](f io.Kleisli[A, B]) Operator[A, A] {
return RIOR.ChainFirstIOK[context.Context](f)
}
//go:inline
func TapIOK[A, B any](f func(A) IO[B]) Operator[A, A] {
func TapIOK[A, B any](f io.Kleisli[A, B]) Operator[A, A] {
return RIOR.TapIOK[context.Context](f)
}
@@ -590,7 +597,7 @@ func TapIOK[A, B any](f func(A) IO[B]) Operator[A, A] {
// Returns a function that chains the IOResult-returning function.
//
//go:inline
func ChainIOEitherK[A, B any](f func(A) IOResult[B]) Operator[A, B] {
func ChainIOEitherK[A, B any](f ioresult.Kleisli[A, B]) Operator[A, B] {
return RIOR.ChainIOEitherK[context.Context](f)
}
@@ -753,7 +760,7 @@ func Flap[B, A any](a A) Operator[func(A) B, B] {
//
//go:inline
func Fold[A, B any](onLeft Kleisli[error, B], onRight Kleisli[A, B]) Operator[A, B] {
return RIOR.Fold(onLeft, onRight)
return RIOR.Fold(function.Flow2(onLeft, WithContext), function.Flow2(onRight, WithContext))
}
// GetOrElse extracts the value from a [ReaderIOResult], providing a default via a function if it fails.
@@ -765,7 +772,7 @@ func Fold[A, B any](onLeft Kleisli[error, B], onRight Kleisli[A, B]) Operator[A,
// Returns a function that converts a ReaderIOResult to a ReaderIO.
//
//go:inline
func GetOrElse[A any](onLeft func(error) ReaderIO[A]) func(ReaderIOResult[A]) ReaderIO[A] {
func GetOrElse[A any](onLeft readerio.Kleisli[error, A]) func(ReaderIOResult[A]) ReaderIO[A] {
return RIOR.GetOrElse(onLeft)
}
@@ -858,32 +865,32 @@ func TapReaderResultK[A, B any](f readerresult.Kleisli[A, B]) Operator[A, A] {
}
//go:inline
func MonadChainReaderIOK[A, B any](ma ReaderIOResult[A], f readerio.Kleisli[context.Context, A, B]) ReaderIOResult[B] {
func MonadChainReaderIOK[A, B any](ma ReaderIOResult[A], f readerio.Kleisli[A, B]) ReaderIOResult[B] {
return RIOR.MonadChainReaderIOK(ma, f)
}
//go:inline
func ChainReaderIOK[A, B any](f readerio.Kleisli[context.Context, A, B]) Operator[A, B] {
func ChainReaderIOK[A, B any](f readerio.Kleisli[A, B]) Operator[A, B] {
return RIOR.ChainReaderIOK(f)
}
//go:inline
func MonadChainFirstReaderIOK[A, B any](ma ReaderIOResult[A], f readerio.Kleisli[context.Context, A, B]) ReaderIOResult[A] {
func MonadChainFirstReaderIOK[A, B any](ma ReaderIOResult[A], f readerio.Kleisli[A, B]) ReaderIOResult[A] {
return RIOR.MonadChainFirstReaderIOK(ma, f)
}
//go:inline
func MonadTapReaderIOK[A, B any](ma ReaderIOResult[A], f readerio.Kleisli[context.Context, A, B]) ReaderIOResult[A] {
func MonadTapReaderIOK[A, B any](ma ReaderIOResult[A], f readerio.Kleisli[A, B]) ReaderIOResult[A] {
return RIOR.MonadTapReaderIOK(ma, f)
}
//go:inline
func ChainFirstReaderIOK[A, B any](f readerio.Kleisli[context.Context, A, B]) Operator[A, A] {
func ChainFirstReaderIOK[A, B any](f readerio.Kleisli[A, B]) Operator[A, A] {
return RIOR.ChainFirstReaderIOK(f)
}
//go:inline
func TapReaderIOK[A, B any](f readerio.Kleisli[context.Context, A, B]) Operator[A, A] {
func TapReaderIOK[A, B any](f readerio.Kleisli[A, B]) Operator[A, A] {
return RIOR.TapReaderIOK(f)
}
@@ -913,15 +920,15 @@ func Read[A any](r context.Context) func(ReaderIOResult[A]) IOResult[A] {
//
//go:inline
func MonadChainLeft[A any](fa ReaderIOResult[A], f Kleisli[error, A]) ReaderIOResult[A] {
return RIOR.MonadChainLeft(fa, f)
return RIOR.MonadChainLeft(fa, WithContextK(f))
}
// ChainLeft is the curried version of [MonadChainLeft].
// It returns a function that chains a computation on the left (error) side of a [ReaderIOResult].
//
//go:inline
func ChainLeft[A any](f Kleisli[error, A]) func(ReaderIOResult[A]) ReaderIOResult[A] {
return RIOR.ChainLeft(f)
func ChainLeft[A any](f Kleisli[error, A]) Operator[A, A] {
return RIOR.ChainLeft(WithContextK(f))
}
// MonadChainFirstLeft chains a computation on the left (error) side but always returns the original error.
@@ -934,12 +941,12 @@ func ChainLeft[A any](f Kleisli[error, A]) func(ReaderIOResult[A]) ReaderIOResul
//
//go:inline
func MonadChainFirstLeft[A, B any](ma ReaderIOResult[A], f Kleisli[error, B]) ReaderIOResult[A] {
return RIOR.MonadChainFirstLeft(ma, f)
return RIOR.MonadChainFirstLeft(ma, WithContextK(f))
}
//go:inline
func MonadTapLeft[A, B any](ma ReaderIOResult[A], f Kleisli[error, B]) ReaderIOResult[A] {
return RIOR.MonadTapLeft(ma, f)
return RIOR.MonadTapLeft(ma, WithContextK(f))
}
// ChainFirstLeft is the curried version of [MonadChainFirstLeft].
@@ -951,10 +958,212 @@ func MonadTapLeft[A, B any](ma ReaderIOResult[A], f Kleisli[error, B]) ReaderIOR
//
//go:inline
func ChainFirstLeft[A, B any](f Kleisli[error, B]) Operator[A, A] {
return RIOR.ChainFirstLeft[A](f)
return RIOR.ChainFirstLeft[A](WithContextK(f))
}
//go:inline
func TapLeft[A, B any](f Kleisli[error, B]) Operator[A, A] {
return RIOR.TapLeft[A](f)
return RIOR.TapLeft[A](WithContextK(f))
}
// Local transforms the context.Context environment before passing it to a ReaderIOResult computation.
//
// This is the Reader's local operation, which allows you to modify the environment
// for a specific computation without affecting the outer context. The transformation
// function receives the current context and returns a new context along with a
// cancel function. The cancel function is automatically called when the computation
// completes (via defer), ensuring proper cleanup of resources.
//
// The function checks for context cancellation before applying the transformation,
// returning an error immediately if the context is already cancelled.
//
// This is useful for:
// - Adding timeouts or deadlines to specific operations
// - Adding context values for nested computations
// - Creating isolated context scopes
// - Implementing context-based dependency injection
//
// Type Parameters:
// - A: The value type of the ReaderIOResult
//
// Parameters:
// - f: A function that transforms the context and returns a cancel function
//
// Returns:
// - An Operator that runs the computation with the transformed context
//
// Example:
//
// import F "github.com/IBM/fp-go/v2/function"
//
// // Add a custom value to the context
// type key int
// const userKey key = 0
//
// addUser := readerioresult.Local[string](func(ctx context.Context) (context.Context, context.CancelFunc) {
// newCtx := context.WithValue(ctx, userKey, "Alice")
// return newCtx, func() {} // No-op cancel
// })
//
// getUser := readerioresult.FromReader(func(ctx context.Context) string {
// if user := ctx.Value(userKey); user != nil {
// return user.(string)
// }
// return "unknown"
// })
//
// result := F.Pipe1(
// getUser,
// addUser,
// )
// value, err := result(context.Background())() // Returns ("Alice", nil)
//
// Timeout Example:
//
// // Add a 5-second timeout to a specific operation
// withTimeout := readerioresult.Local[Data](func(ctx context.Context) (context.Context, context.CancelFunc) {
// return context.WithTimeout(ctx, 5*time.Second)
// })
//
// result := F.Pipe1(
// fetchData,
// withTimeout,
// )
func Local[A any](f func(context.Context) (context.Context, context.CancelFunc)) Operator[A, A] {
return func(rr ReaderIOResult[A]) ReaderIOResult[A] {
return func(ctx context.Context) IOResult[A] {
return func() Result[A] {
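// fail fast if the context has already been cancelled, surfacing the cancellation cause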
if ctx.Err() != nil {
return result.Left[A](context.Cause(ctx))
}
otherCtx, otherCancel := f(ctx)
defer otherCancel()
return rr(otherCtx)()
}
}
}
}
// WithTimeout adds a timeout to the context for a ReaderIOResult computation.
//
// This is a convenience wrapper around Local that uses context.WithTimeout.
// The computation must complete within the specified duration, or it will be
// cancelled. This is useful for ensuring operations don't run indefinitely
// and for implementing timeout-based error handling.
//
// The timeout is relative to when the ReaderIOResult is executed, not when
// WithTimeout is called. The cancel function is automatically called when
// the computation completes, ensuring proper cleanup. If the timeout expires,
// the computation will receive a context.DeadlineExceeded error.
//
// Type Parameters:
// - A: The value type of the ReaderIOResult
//
// Parameters:
// - timeout: The maximum duration for the computation
//
// Returns:
// - An Operator that runs the computation with a timeout
//
// Example:
//
// import (
// "time"
// F "github.com/IBM/fp-go/v2/function"
// )
//
// // Fetch data with a 5-second timeout
// fetchData := readerioresult.FromReader(func(ctx context.Context) Data {
// // Simulate slow operation
// select {
// case <-time.After(10 * time.Second):
// return Data{Value: "slow"}
// case <-ctx.Done():
// return Data{}
// }
// })
//
// result := F.Pipe1(
// fetchData,
// readerioresult.WithTimeout[Data](5*time.Second),
// )
// res := result(context.Background())() // Left(context.DeadlineExceeded) after 5s
//
// Successful Example:
//
// quickFetch := readerioresult.Right(Data{Value: "quick"})
// result := F.Pipe1(
// quickFetch,
// readerioresult.WithTimeout[Data](5*time.Second),
// )
// res := result(context.Background())() // Right(Data{Value: "quick"})
func WithTimeout[A any](timeout time.Duration) Operator[A, A] {
return Local[A](func(ctx context.Context) (context.Context, context.CancelFunc) {
return context.WithTimeout(ctx, timeout)
})
}
// WithDeadline adds an absolute deadline to the context for a ReaderIOResult computation.
//
// This is a convenience wrapper around Local that uses context.WithDeadline.
// The computation must complete before the specified time, or it will be
// cancelled. This is useful for coordinating operations that must finish
// by a specific time, such as request deadlines or scheduled tasks.
//
// The deadline is an absolute time, unlike WithTimeout which uses a relative
// duration. The cancel function is automatically called when the computation
// completes, ensuring proper cleanup. If the deadline passes, the computation
// will receive a context.DeadlineExceeded error.
//
// Type Parameters:
// - A: The value type of the ReaderIOResult
//
// Parameters:
// - deadline: The absolute time by which the computation must complete
//
// Returns:
// - An Operator that runs the computation with a deadline
//
// Example:
//
// import (
// "time"
// F "github.com/IBM/fp-go/v2/function"
// )
//
// // Operation must complete by 3 PM
// deadline := time.Date(2024, 1, 1, 15, 0, 0, 0, time.UTC)
//
// fetchData := readerioresult.FromReader(func(ctx context.Context) Data {
// // Simulate operation
// select {
// case <-time.After(1 * time.Hour):
// return Data{Value: "done"}
// case <-ctx.Done():
// return Data{}
// }
// })
//
// result := F.Pipe1(
// fetchData,
// readerioresult.WithDeadline[Data](deadline),
// )
// res := result(context.Background())() // Left(context.DeadlineExceeded) if the deadline has passed
//
// Combining with Parent Context:
//
// // If parent context already has a deadline, the earlier one takes precedence
// parentCtx, cancel := context.WithDeadline(context.Background(), time.Now().Add(1*time.Hour))
// defer cancel()
//
// laterDeadline := time.Now().Add(2 * time.Hour)
// result := F.Pipe1(
// fetchData,
// readerioresult.WithDeadline[Data](laterDeadline),
// )
// res := result(parentCtx)() // the parent's 1-hour deadline applies
func WithDeadline[A any](deadline time.Time) Operator[A, A] {
return Local[A](func(ctx context.Context) (context.Context, context.CancelFunc) {
return context.WithDeadline(ctx, deadline)
})
}


@@ -567,15 +567,13 @@ func TestMemoize(t *testing.T) {
res1 := computation(context.Background())()
assert.True(t, E.IsRight(res1))
val1 := E.ToOption(res1)
v1, _ := O.Unwrap(val1)
assert.Equal(t, 1, v1)
assert.Equal(t, O.Of(1), val1)
// Second execution should return cached value
res2 := computation(context.Background())()
assert.True(t, E.IsRight(res2))
val2 := E.ToOption(res2)
v2, _ := O.Unwrap(val2)
assert.Equal(t, 1, v2)
assert.Equal(t, O.Of(1), val2)
// Counter should only be incremented once
assert.Equal(t, 1, counter)
@@ -739,9 +737,7 @@ func TestTraverseArray(t *testing.T) {
res := result(context.Background())()
assert.True(t, E.IsRight(res))
arrOpt := E.ToOption(res)
assert.True(t, O.IsSome(arrOpt))
resultArr, _ := O.Unwrap(arrOpt)
assert.Equal(t, []int{2, 4, 6}, resultArr)
assert.Equal(t, O.Of([]int{2, 4, 6}), arrOpt)
})
t.Run("TraverseArray with error", func(t *testing.T) {
@@ -765,9 +761,7 @@ func TestSequenceArray(t *testing.T) {
res := result(context.Background())()
assert.True(t, E.IsRight(res))
arrOpt := E.ToOption(res)
assert.True(t, O.IsSome(arrOpt))
resultArr, _ := O.Unwrap(arrOpt)
assert.Equal(t, []int{1, 2, 3}, resultArr)
assert.Equal(t, O.Of([]int{1, 2, 3}), arrOpt)
}
func TestTraverseRecord(t *testing.T) {


@@ -0,0 +1,184 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerioresult
import (
"github.com/IBM/fp-go/v2/either"
F "github.com/IBM/fp-go/v2/function"
RIOR "github.com/IBM/fp-go/v2/readerioresult"
)
// TailRec implements stack-safe tail recursion for the context-aware ReaderIOResult monad.
//
// This function enables recursive computations that combine four powerful concepts:
// - Context awareness: Automatic cancellation checking via [context.Context]
// - Environment dependency (Reader aspect): Access to configuration, context, or dependencies
// - Side effects (IO aspect): Logging, file I/O, network calls, etc.
// - Error handling (Either aspect): Computations that can fail with an error
//
// The function uses an iterative loop to execute the recursion, making it safe for deep
// or unbounded recursion without risking stack overflow. Additionally, it integrates
// context cancellation checking through [WithContext], ensuring that recursive computations
// can be cancelled gracefully.
//
// # How It Works
//
// TailRec takes a Kleisli arrow that returns Either[A, B]:
// - Left(A): Continue recursion with the new state A
// - Right(B): Terminate recursion successfully and return the final result B
//
// The function wraps each iteration with [WithContext] to ensure context cancellation
// is checked before each recursive step. If the context is cancelled, the recursion
// terminates early with a context cancellation error.
//
// # Type Parameters
//
// - A: The state type that changes during recursion
// - B: The final result type when recursion terminates successfully
//
// # Parameters
//
// - f: A Kleisli arrow (A => ReaderIOResult[Either[A, B]]) that:
// - Takes the current state A
// - Returns a ReaderIOResult that depends on [context.Context]
// - Can fail with error (Left in the outer Either)
// - Produces Either[A, B] to control recursion flow (Right in the outer Either)
//
// # Returns
//
// A Kleisli arrow (A => ReaderIOResult[B]) that:
// - Takes an initial state A
// - Returns a ReaderIOResult that requires [context.Context]
// - Can fail with error or context cancellation
// - Produces the final result B after recursion completes
//
// # Context Cancellation
//
// Unlike the base [readerioresult.TailRec], this version automatically integrates
// context cancellation checking:
// - Each recursive iteration checks if the context is cancelled
// - If cancelled, recursion terminates immediately with a cancellation error
// - This prevents runaway recursive computations in cancelled contexts
// - Enables responsive cancellation for long-running recursive operations
//
// # Use Cases
//
// 1. Cancellable recursive algorithms:
// - Tree traversals that can be cancelled mid-operation
// - Graph algorithms with timeout requirements
// - Recursive parsers that respect cancellation
//
// 2. Long-running recursive computations:
// - File system traversals with cancellation support
// - Network operations with timeout handling
// - Database operations with connection timeout awareness
//
// 3. Interactive recursive operations:
// - User-initiated operations that can be cancelled
// - Background tasks with cancellation support
// - Streaming operations with graceful shutdown
//
// # Example: Cancellable Countdown
//
// countdownStep := func(n int) readerioresult.ReaderIOResult[either.Either[int, string]] {
// return func(ctx context.Context) ioeither.IOEither[error, either.Either[int, string]] {
// return func() either.Either[error, either.Either[int, string]] {
// if n <= 0 {
// return either.Right[error](either.Right[int]("Done!"))
// }
// // Simulate some work
// time.Sleep(100 * time.Millisecond)
// return either.Right[error](either.Left[string](n - 1))
// }
// }
// }
//
// countdown := readerioresult.TailRec(countdownStep)
//
// // With cancellation
// ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
// defer cancel()
// result := countdown(10)(ctx)() // Will be cancelled after ~500ms
//
// # Example: Cancellable File Processing
//
// type ProcessState struct {
// files []string
// processed []string
// }
//
// processStep := func(state ProcessState) readerioresult.ReaderIOResult[either.Either[ProcessState, []string]] {
// return func(ctx context.Context) ioeither.IOEither[error, either.Either[ProcessState, []string]] {
// return func() either.Either[error, either.Either[ProcessState, []string]] {
// if len(state.files) == 0 {
// return either.Right[error](either.Right[ProcessState](state.processed))
// }
//
// file := state.files[0]
// // Process file (this could be cancelled via context)
// if err := processFileWithContext(ctx, file); err != nil {
// return either.Left[either.Either[ProcessState, []string]](err)
// }
//
// return either.Right[error](either.Left[[]string](ProcessState{
// files: state.files[1:],
// processed: append(state.processed, file),
// }))
// }
// }
// }
//
// processFiles := readerioresult.TailRec(processStep)
// ctx, cancel := context.WithCancel(context.Background())
//
// // Can be cancelled at any point during processing
// go func() {
// time.Sleep(2 * time.Second)
// cancel() // Cancel after 2 seconds
// }()
//
// result := processFiles(ProcessState{files: manyFiles})(ctx)()
//
// # Stack Safety
//
// The iterative implementation ensures that even deeply recursive computations
// (thousands or millions of iterations) will not cause stack overflow, while
// still respecting context cancellation:
//
// // Safe for very large inputs with cancellation support
// largeCountdown := readerioresult.TailRec(countdownStep)
// ctx := context.Background()
// result := largeCountdown(1000000)(ctx)() // Safe, no stack overflow
//
// # Performance Considerations
//
// - Each iteration includes context cancellation checking overhead
// - Context checking happens before each recursive step
// - For performance-critical code, consider the cancellation checking cost
// - The [WithContext] wrapper adds minimal overhead for cancellation safety
//
// # See Also
//
// - [readerioresult.TailRec]: Base tail recursion without automatic context checking
// - [WithContext]: Context cancellation wrapper used internally
// - [Chain]: For sequencing ReaderIOResult computations
// - [Ask]: For accessing the context
// - [Left]/[Right]: For creating error/success values
//
//go:inline
func TailRec[A, B any](f Kleisli[A, either.Either[A, B]]) Kleisli[A, B] {
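// wrap each recursive step with WithContext so cancellation is checked before every iteration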
return RIOR.TailRec(F.Flow2(f, WithContext))
}


@@ -0,0 +1,433 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerioresult
import (
"context"
"errors"
"fmt"
"sync/atomic"
"testing"
"time"
A "github.com/IBM/fp-go/v2/array"
E "github.com/IBM/fp-go/v2/either"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestTailRec_BasicRecursion(t *testing.T) {
// Test basic countdown recursion
countdownStep := func(n int) ReaderIOResult[E.Either[int, string]] {
return func(ctx context.Context) IOEither[E.Either[int, string]] {
return func() Either[E.Either[int, string]] {
if n <= 0 {
return E.Right[error](E.Right[int]("Done!"))
}
return E.Right[error](E.Left[string](n - 1))
}
}
}
countdown := TailRec(countdownStep)
result := countdown(5)(context.Background())()
assert.Equal(t, E.Of[error]("Done!"), result)
}
func TestTailRec_FactorialRecursion(t *testing.T) {
// Test factorial computation using tail recursion
type FactorialState struct {
n int
acc int
}
factorialStep := func(state FactorialState) ReaderIOResult[E.Either[FactorialState, int]] {
return func(ctx context.Context) IOEither[E.Either[FactorialState, int]] {
return func() Either[E.Either[FactorialState, int]] {
if state.n <= 1 {
return E.Right[error](E.Right[FactorialState](state.acc))
}
return E.Right[error](E.Left[int](FactorialState{
n: state.n - 1,
acc: state.acc * state.n,
}))
}
}
}
factorial := TailRec(factorialStep)
result := factorial(FactorialState{n: 5, acc: 1})(context.Background())()
assert.Equal(t, E.Of[error](120), result) // 5! = 120
}
func TestTailRec_ErrorHandling(t *testing.T) {
// Test that errors are properly propagated
testErr := errors.New("computation error")
errorStep := func(n int) ReaderIOResult[E.Either[int, string]] {
return func(ctx context.Context) IOEither[E.Either[int, string]] {
return func() Either[E.Either[int, string]] {
if n == 3 {
return E.Left[E.Either[int, string]](testErr)
}
if n <= 0 {
return E.Right[error](E.Right[int]("Done!"))
}
return E.Right[error](E.Left[string](n - 1))
}
}
}
errorRecursion := TailRec(errorStep)
result := errorRecursion(5)(context.Background())()
assert.True(t, E.IsLeft(result))
err := E.ToError(result)
assert.Equal(t, testErr, err)
}
func TestTailRec_ContextCancellation(t *testing.T) {
// Test that recursion gets cancelled early when context is canceled
var iterationCount int32
slowStep := func(n int) ReaderIOResult[E.Either[int, string]] {
return func(ctx context.Context) IOEither[E.Either[int, string]] {
return func() Either[E.Either[int, string]] {
atomic.AddInt32(&iterationCount, 1)
// Simulate some work
time.Sleep(50 * time.Millisecond)
if n <= 0 {
return E.Right[error](E.Right[int]("Done!"))
}
return E.Right[error](E.Left[string](n - 1))
}
}
}
slowRecursion := TailRec(slowStep)
// Create a context that will be cancelled after 100ms
ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond)
defer cancel()
start := time.Now()
result := slowRecursion(10)(ctx)()
elapsed := time.Since(start)
// Should be cancelled and return an error
assert.True(t, E.IsLeft(result))
// Should complete quickly due to cancellation (much less than 10 * 50ms = 500ms)
assert.Less(t, elapsed, 200*time.Millisecond)
// Should have executed only a few iterations before cancellation
iterations := atomic.LoadInt32(&iterationCount)
assert.Less(t, iterations, int32(5), "Should have been cancelled before completing all iterations")
}
func TestTailRec_ImmediateCancellation(t *testing.T) {
// Test with an already cancelled context
countdownStep := func(n int) ReaderIOResult[E.Either[int, string]] {
return func(ctx context.Context) IOEither[E.Either[int, string]] {
return func() Either[E.Either[int, string]] {
if n <= 0 {
return E.Right[error](E.Right[int]("Done!"))
}
return E.Right[error](E.Left[string](n - 1))
}
}
}
countdown := TailRec(countdownStep)
// Create an already cancelled context
ctx, cancel := context.WithCancel(context.Background())
cancel()
result := countdown(5)(ctx)()
// Should immediately return a cancellation error
assert.True(t, E.IsLeft(result))
err := E.ToError(result)
assert.Equal(t, context.Canceled, err)
}
func TestTailRec_StackSafety(t *testing.T) {
// Test that deep recursion doesn't cause stack overflow
const largeN = 10000
countdownStep := func(n int) ReaderIOResult[E.Either[int, int]] {
return func(ctx context.Context) IOEither[E.Either[int, int]] {
return func() Either[E.Either[int, int]] {
if n <= 0 {
return E.Right[error](E.Right[int](0))
}
return E.Right[error](E.Left[int](n - 1))
}
}
}
countdown := TailRec(countdownStep)
result := countdown(largeN)(context.Background())()
assert.Equal(t, E.Of[error](0), result)
}
func TestTailRec_StackSafetyWithCancellation(t *testing.T) {
// Test stack safety with cancellation after many iterations
const largeN = 100000
var iterationCount int32
countdownStep := func(n int) ReaderIOResult[E.Either[int, int]] {
return func(ctx context.Context) IOEither[E.Either[int, int]] {
return func() Either[E.Either[int, int]] {
atomic.AddInt32(&iterationCount, 1)
// Add a small delay every 1000 iterations to make cancellation more likely
if n%1000 == 0 {
time.Sleep(1 * time.Millisecond)
}
if n <= 0 {
return E.Right[error](E.Right[int](0))
}
return E.Right[error](E.Left[int](n - 1))
}
}
}
countdown := TailRec(countdownStep)
// Cancel after 50ms to allow some iterations but not all
ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
defer cancel()
result := countdown(largeN)(ctx)()
// Should be cancelled (or completed if very fast)
// The key is that it doesn't cause a stack overflow
iterations := atomic.LoadInt32(&iterationCount)
assert.Greater(t, iterations, int32(0))
// If it was cancelled, verify it didn't complete all iterations
if E.IsLeft(result) {
assert.Less(t, iterations, int32(largeN))
}
}
func TestTailRec_ComplexState(t *testing.T) {
// Test with more complex state management
type ProcessState struct {
items []string
processed []string
errors []error
}
processStep := func(state ProcessState) ReaderIOResult[E.Either[ProcessState, []string]] {
return func(ctx context.Context) IOEither[E.Either[ProcessState, []string]] {
return func() Either[E.Either[ProcessState, []string]] {
if A.IsEmpty(state.items) {
return E.Right[error](E.Right[ProcessState](state.processed))
}
item := state.items[0]
// Simulate processing that might fail for certain items
if item == "error-item" {
return E.Left[E.Either[ProcessState, []string]](
fmt.Errorf("failed to process item: %s", item))
}
return E.Right[error](E.Left[[]string](ProcessState{
items: state.items[1:],
processed: append(state.processed, item),
errors: state.errors,
}))
}
}
}
processItems := TailRec(processStep)
t.Run("successful processing", func(t *testing.T) {
initialState := ProcessState{
items: []string{"item1", "item2", "item3"},
processed: []string{},
errors: []error{},
}
result := processItems(initialState)(context.Background())()
assert.Equal(t, E.Of[error]([]string{"item1", "item2", "item3"}), result)
})
t.Run("processing with error", func(t *testing.T) {
initialState := ProcessState{
items: []string{"item1", "error-item", "item3"},
processed: []string{},
errors: []error{},
}
result := processItems(initialState)(context.Background())()
assert.True(t, E.IsLeft(result))
err := E.ToError(result)
assert.Contains(t, err.Error(), "failed to process item: error-item")
})
}
func TestTailRec_CancellationDuringProcessing(t *testing.T) {
// Test cancellation during a realistic processing scenario
type FileProcessState struct {
files []string
processed int
}
var processedCount int32
processFileStep := func(state FileProcessState) ReaderIOResult[E.Either[FileProcessState, int]] {
return func(ctx context.Context) IOEither[E.Either[FileProcessState, int]] {
return func() Either[E.Either[FileProcessState, int]] {
if A.IsEmpty(state.files) {
return E.Right[error](E.Right[FileProcessState](state.processed))
}
// Simulate file processing time
time.Sleep(20 * time.Millisecond)
atomic.AddInt32(&processedCount, 1)
return E.Right[error](E.Left[int](FileProcessState{
files: state.files[1:],
processed: state.processed + 1,
}))
}
}
}
processFiles := TailRec(processFileStep)
// Create many files to process
files := make([]string, 20)
for i := range files {
files[i] = fmt.Sprintf("file%d.txt", i)
}
initialState := FileProcessState{
files: files,
processed: 0,
}
// Cancel after 100ms (should allow ~5 files to be processed)
ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond)
defer cancel()
start := time.Now()
result := processFiles(initialState)(ctx)()
elapsed := time.Since(start)
// Should be cancelled
assert.True(t, E.IsLeft(result))
// Should complete quickly due to cancellation
assert.Less(t, elapsed, 150*time.Millisecond)
// Should have processed some but not all files
processed := atomic.LoadInt32(&processedCount)
assert.Greater(t, processed, int32(0))
assert.Less(t, processed, int32(20))
}
func TestTailRec_ZeroIterations(t *testing.T) {
// Test case where recursion terminates immediately
immediateStep := func(n int) ReaderIOResult[E.Either[int, string]] {
return func(ctx context.Context) IOEither[E.Either[int, string]] {
return func() Either[E.Either[int, string]] {
return E.Right[error](E.Right[int]("immediate"))
}
}
}
immediate := TailRec(immediateStep)
result := immediate(100)(context.Background())()
assert.Equal(t, E.Of[error]("immediate"), result)
}
func TestTailRec_ContextWithDeadline(t *testing.T) {
// Test with context deadline
var iterationCount int32
slowStep := func(n int) ReaderIOResult[E.Either[int, string]] {
return func(ctx context.Context) IOEither[E.Either[int, string]] {
return func() Either[E.Either[int, string]] {
atomic.AddInt32(&iterationCount, 1)
time.Sleep(30 * time.Millisecond)
if n <= 0 {
return E.Right[error](E.Right[int]("Done!"))
}
return E.Right[error](E.Left[string](n - 1))
}
}
}
slowRecursion := TailRec(slowStep)
// Set deadline 80ms from now
ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(80*time.Millisecond))
defer cancel()
result := slowRecursion(10)(ctx)()
// Should be cancelled due to deadline
assert.True(t, E.IsLeft(result))
// Should have executed only a few iterations
iterations := atomic.LoadInt32(&iterationCount)
assert.Greater(t, iterations, int32(0))
assert.Less(t, iterations, int32(5))
}
func TestTailRec_ContextWithValue(t *testing.T) {
// Test that context values are preserved through recursion
type contextKey string
const testKey contextKey = "test"
valueStep := func(n int) ReaderIOResult[E.Either[int, string]] {
return func(ctx context.Context) IOEither[E.Either[int, string]] {
return func() Either[E.Either[int, string]] {
value := ctx.Value(testKey)
require.NotNil(t, value)
assert.Equal(t, "test-value", value.(string))
if n <= 0 {
return E.Right[error](E.Right[int]("Done!"))
}
return E.Right[error](E.Left[string](n - 1))
}
}
}
valueRecursion := TailRec(valueStep)
ctx := context.WithValue(context.Background(), testKey, "test-value")
result := valueRecursion(3)(ctx)()
assert.Equal(t, E.Of[error]("Done!"), result)
}

View File

@@ -16,7 +16,11 @@
package readerioresult
import (
"context"
"io"
RIOR "github.com/IBM/fp-go/v2/readerioresult"
"github.com/IBM/fp-go/v2/result"
)
// WithResource constructs a function that creates a resource, then operates on it and then releases the resource.
@@ -55,3 +59,111 @@ import (
func WithResource[A, R, ANY any](onCreate ReaderIOResult[R], onRelease Kleisli[R, ANY]) Kleisli[Kleisli[R, A], A] {
return RIOR.WithResource[A](onCreate, onRelease)
}
// onClose is a helper function that creates a ReaderIOResult for closing an io.Closer resource.
// It safely calls the Close() method and handles any errors that may occur during closing.
//
// Type Parameters:
// - A: Must implement io.Closer interface
//
// Parameters:
// - a: The resource to close
//
// Returns:
// - ReaderIOResult[any]: A computation that closes the resource and returns nil on success
//
// The function ignores the context parameter since closing operations typically don't need context.
// Any error from Close() is captured and returned as a Result error.
func onClose[A io.Closer](a A) ReaderIOResult[any] {
return func(_ context.Context) IOResult[any] {
return func() Result[any] {
return result.TryCatchError[any](nil, a.Close())
}
}
}
// WithCloser creates a resource management function specifically for io.Closer resources.
// This is a specialized version of WithResource that automatically handles closing of resources
// that implement the io.Closer interface.
//
// The function ensures that:
// - The resource is created using the onCreate function
// - The resource is automatically closed when the operation completes (success or failure)
// - Any errors during closing are properly handled
// - The resource is closed even if the main operation fails or the context is canceled
//
// Type Parameters:
// - B: The type of value returned by the resource-using function
// - A: The type of resource that implements io.Closer
//
// Parameters:
// - onCreate: ReaderIOResult that creates the io.Closer resource
//
// Returns:
// - A function that takes a resource-using function and returns a ReaderIOResult[B]
//
// Example with file operations:
//
// openFile := func(filename string) ReaderIOResult[*os.File] {
// return TryCatch(func(ctx context.Context) func() (*os.File, error) {
// return func() (*os.File, error) {
// return os.Open(filename)
// }
// })
// }
//
// fileReader := WithCloser(openFile("data.txt"))
// result := fileReader(func(f *os.File) ReaderIOResult[string] {
// return TryCatch(func(ctx context.Context) func() (string, error) {
// return func() (string, error) {
// data, err := io.ReadAll(f)
// return string(data), err
// }
// })
// })
//
// Example with HTTP response:
//
// httpGet := func(url string) ReaderIOResult[*http.Response] {
// return TryCatch(func(ctx context.Context) func() (*http.Response, error) {
// return func() (*http.Response, error) {
// return http.Get(url)
// }
// })
// }
//
// responseReader := WithCloser(httpGet("https://api.example.com/data"))
// result := responseReader(func(resp *http.Response) ReaderIOResult[[]byte] {
// return TryCatch(func(ctx context.Context) func() ([]byte, error) {
// return func() ([]byte, error) {
// return io.ReadAll(resp.Body)
// }
// })
// })
//
// Example with database connection:
//
// openDB := func(dsn string) ReaderIOResult[*sql.DB] {
// return TryCatch(func(ctx context.Context) func() (*sql.DB, error) {
// return func() (*sql.DB, error) {
// return sql.Open("postgres", dsn)
// }
// })
// }
//
// dbQuery := WithCloser(openDB("postgres://..."))
// result := dbQuery(func(db *sql.DB) ReaderIOResult[[]User] {
// return TryCatch(func(ctx context.Context) func() ([]User, error) {
// return func() ([]User, error) {
// rows, err := db.QueryContext(ctx, "SELECT * FROM users")
// if err != nil {
// return nil, err
// }
// defer rows.Close()
// return scanUsers(rows)
// }
// })
// })
func WithCloser[B any, A io.Closer](onCreate ReaderIOResult[A]) Kleisli[Kleisli[A, B], B] {
return WithResource[B](onCreate, onClose[A])
}

View File

@@ -0,0 +1,179 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerioresult
import (
"context"
"time"
RIO "github.com/IBM/fp-go/v2/context/readerio"
R "github.com/IBM/fp-go/v2/retry"
RG "github.com/IBM/fp-go/v2/retry/generic"
)
// Retrying retries a ReaderIOResult computation according to a retry policy with context awareness.
//
// This function implements a retry mechanism for operations that depend on a [context.Context],
// perform side effects (IO), and can fail (Result). It respects context cancellation, meaning
// that if the context is cancelled during retry delays, the operation will stop immediately
// and return the cancellation error.
//
// The retry loop will continue until one of the following occurs:
// - The check function returns false (the result is acceptable: a success or a non-retryable failure)
// - The retry policy returns None (retry limit reached)
// - The context is cancelled (returns context.Canceled or context.DeadlineExceeded)
//
// Parameters:
//
// - policy: A RetryPolicy that determines when and how long to wait between retries.
// The policy receives a RetryStatus on each iteration and returns an optional delay.
// If it returns None, retrying stops. Common policies include LimitRetries,
// ExponentialBackoff, and CapDelay from the retry package.
//
// - action: A Kleisli arrow that takes a RetryStatus and returns a ReaderIOResult[A].
// This function is called on each retry attempt and receives information about the
// current retry state (iteration number, cumulative delay, etc.). The action depends
// on a context.Context and produces a Result[A]. The context passed to the action
// will be the same context used for retry delays, so cancellation is properly propagated.
//
// - check: A predicate function that examines the Result[A] and returns true if the
// operation should be retried, or false if it should stop. This allows you to
// distinguish between retryable failures (e.g., network timeouts) and permanent
// failures (e.g., invalid input). Note that context cancellation errors will
// automatically stop retrying regardless of this function's return value.
//
// Returns:
//
// A ReaderIOResult[A] that, when executed with a context, will perform the retry
// logic with context cancellation support and return the final result.
//
// Type Parameters:
// - A: The type of the success value
//
// Context Cancellation:
//
// The retry mechanism respects context cancellation in two ways:
// 1. During retry delays: If the context is cancelled while waiting between retries,
// the operation stops immediately and returns the context error.
// 2. During action execution: If the action itself checks the context and returns
// an error due to cancellation, the retry loop will stop (assuming the check
// function doesn't force a retry on context errors).
//
// Example:
//
// // Create a retry policy: exponential backoff with a cap, limited to 5 retries
// policy := M.Concat(
// retry.LimitRetries(5),
// retry.CapDelay(10*time.Second, retry.ExponentialBackoff(100*time.Millisecond)),
// )(retry.Monoid)
//
// // Action that fetches data, with retry status information
// fetchData := func(status retry.RetryStatus) ReaderIOResult[string] {
// return func(ctx context.Context) IOResult[string] {
// return func() Result[string] {
// // Check if context is cancelled
// if ctx.Err() != nil {
// return result.Left[string](ctx.Err())
// }
// // Simulate an HTTP request that might fail
// if status.IterNumber < 3 {
// return result.Left[string](fmt.Errorf("temporary error"))
// }
// return result.Of("success")
// }
// }
// }
//
// // Check function: retry on any error except context cancellation
// shouldRetry := func(r Result[string]) bool {
// return result.IsLeft(r) && !errors.Is(result.GetLeft(r), context.Canceled)
// }
//
// // Create the retrying computation
// retryingFetch := Retrying(policy, fetchData, shouldRetry)
//
// // Execute with a cancellable context
// ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
// defer cancel()
// ioResult := retryingFetch(ctx)
// finalResult := ioResult()
//
// See also:
// - retry.RetryPolicy for available retry policies
// - retry.RetryStatus for information passed to the action
// - context.Context for context cancellation semantics
//
//go:inline
func Retrying[A any](
policy R.RetryPolicy,
action Kleisli[R.RetryStatus, A],
check func(Result[A]) bool,
) ReaderIOResult[A] {
// delayWithCancel implements a context-aware delay mechanism for retry operations.
// It creates a timeout context that will be cancelled when either:
// 1. The delay duration expires (normal case), or
// 2. The parent context is cancelled (early termination)
//
// The function waits on timeoutCtx.Done(), which will be signaled in either case:
// - If the delay expires, timeoutCtx is cancelled by the timeout
// - If the parent ctx is cancelled, timeoutCtx inherits the cancellation
//
// After the wait completes, we dispatch to the next action by calling ri(ctx)().
// This works correctly because the action is wrapped in WithContextK, which handles
// context cancellation by checking ctx.Err() and returning an appropriate error
// (context.Canceled or context.DeadlineExceeded) when the context is cancelled.
//
// This design ensures that:
// - Retry delays respect context cancellation and terminate immediately
// - The cancellation error propagates correctly through the retry chain
// - No unnecessary delays occur when the context is already cancelled
delayWithCancel := func(delay time.Duration) RIO.Operator[R.RetryStatus, R.RetryStatus] {
return func(ri ReaderIO[R.RetryStatus]) ReaderIO[R.RetryStatus] {
return func(ctx context.Context) IO[R.RetryStatus] {
return func() R.RetryStatus {
// Create a timeout context that will be cancelled when either:
// - The delay duration expires, or
// - The parent context is cancelled
timeoutCtx, cancelTimeout := context.WithTimeout(ctx, delay)
defer cancelTimeout()
// Wait for either the timeout or parent context cancellation
<-timeoutCtx.Done()
// Dispatch to the next action with the original context.
// WithContextK will handle context cancellation correctly.
return ri(ctx)()
}
}
}
}
// delegate to the generic retry implementation, instantiated for the concrete ReaderIO types
return RG.Retrying(
RIO.Chain[Result[A], Result[A]],
RIO.Chain[R.RetryStatus, Result[A]],
RIO.Of[Result[A]],
RIO.Of[R.RetryStatus],
delayWithCancel,
policy,
WithContextK(action),
check,
)
}

View File

@@ -0,0 +1,511 @@
// Copyright (c) 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerioresult
import (
"context"
"errors"
"fmt"
"testing"
"time"
"github.com/IBM/fp-go/v2/result"
R "github.com/IBM/fp-go/v2/retry"
"github.com/stretchr/testify/assert"
)
// Helper function to create a test retry policy
func testRetryPolicy() R.RetryPolicy {
return R.Monoid.Concat(
R.LimitRetries(5),
R.CapDelay(1*time.Second, R.ExponentialBackoff(10*time.Millisecond)),
)
}
// TestRetrying_SuccessOnFirstAttempt tests that Retrying succeeds immediately
// when the action succeeds on the first attempt.
func TestRetrying_SuccessOnFirstAttempt(t *testing.T) {
policy := testRetryPolicy()
action := func(status R.RetryStatus) ReaderIOResult[string] {
return func(ctx context.Context) IOResult[string] {
return func() Result[string] {
return result.Of("success")
}
}
}
check := func(r Result[string]) bool {
return result.IsLeft(r)
}
retrying := Retrying(policy, action, check)
ctx := t.Context()
res := retrying(ctx)()
assert.Equal(t, result.Of("success"), res)
}
// TestRetrying_SuccessAfterRetries tests that Retrying eventually succeeds
// after a few failed attempts.
func TestRetrying_SuccessAfterRetries(t *testing.T) {
policy := testRetryPolicy()
action := func(status R.RetryStatus) ReaderIOResult[string] {
return func(ctx context.Context) IOResult[string] {
return func() Result[string] {
// Fail on first 3 attempts, succeed on 4th
if status.IterNumber < 3 {
return result.Left[string](fmt.Errorf("attempt %d failed", status.IterNumber))
}
return result.Of(fmt.Sprintf("success on attempt %d", status.IterNumber))
}
}
}
check := func(r Result[string]) bool {
return result.IsLeft(r)
}
retrying := Retrying(policy, action, check)
ctx := t.Context()
res := retrying(ctx)()
assert.Equal(t, result.Of("success on attempt 3"), res)
}
// TestRetrying_ExhaustsRetries tests that Retrying stops after the retry limit
// is reached and returns the last error.
func TestRetrying_ExhaustsRetries(t *testing.T) {
policy := R.LimitRetries(3)
action := func(status R.RetryStatus) ReaderIOResult[string] {
return func(ctx context.Context) IOResult[string] {
return func() Result[string] {
return result.Left[string](fmt.Errorf("attempt %d failed", status.IterNumber))
}
}
}
check := func(r Result[string]) bool {
return result.IsLeft(r)
}
retrying := Retrying(policy, action, check)
ctx := t.Context()
res := retrying(ctx)()
assert.True(t, result.IsLeft(res))
assert.Equal(t, result.Left[string](fmt.Errorf("attempt 3 failed")), res)
}
// TestRetrying_ActionChecksContextCancellation tests that actions can check
// the context and return early if it's cancelled.
func TestRetrying_ActionChecksContextCancellation(t *testing.T) {
policy := R.LimitRetries(10)
attemptCount := 0
action := func(status R.RetryStatus) ReaderIOResult[string] {
return func(ctx context.Context) IOResult[string] {
return func() Result[string] {
attemptCount++
// Check context at the start of the action
if ctx.Err() != nil {
return result.Left[string](ctx.Err())
}
// Simulate work that might take time
time.Sleep(10 * time.Millisecond)
// Check context again after work
if ctx.Err() != nil {
return result.Left[string](ctx.Err())
}
// Always fail to trigger retries
return result.Left[string](fmt.Errorf("attempt %d failed", status.IterNumber))
}
}
}
check := func(r Result[string]) bool {
// Don't retry on context errors
val, err := result.Unwrap(r)
_ = val
if err != nil && (errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded)) {
return false
}
return result.IsLeft(r)
}
retrying := Retrying(policy, action, check)
// Create a context that we'll cancel after a short time
ctx, cancel := context.WithCancel(t.Context())
// Start the retry operation in a goroutine
resultChan := make(chan Result[string], 1)
go func() {
res := retrying(ctx)()
resultChan <- res
}()
// Cancel the context after allowing a couple attempts
time.Sleep(50 * time.Millisecond)
cancel()
// Wait for the result
res := <-resultChan
// Should have stopped due to context cancellation
assert.True(t, result.IsLeft(res))
// Should have stopped early (not all 10 attempts)
assert.Less(t, attemptCount, 10, "Should stop retrying when action detects context cancellation")
// The error should be related to context cancellation or an early attempt
val, err := result.Unwrap(res)
_ = val
assert.Error(t, err)
}
// TestRetrying_ContextCancelledBeforeStart tests that if the context is already
// cancelled before starting, the operation fails immediately.
func TestRetrying_ContextCancelledBeforeStart(t *testing.T) {
policy := testRetryPolicy()
attemptCount := 0
action := func(status R.RetryStatus) ReaderIOResult[string] {
return func(ctx context.Context) IOResult[string] {
return func() Result[string] {
attemptCount++
// Check context before doing work
if ctx.Err() != nil {
return result.Left[string](ctx.Err())
}
return result.Left[string](fmt.Errorf("attempt %d failed", status.IterNumber))
}
}
}
check := func(r Result[string]) bool {
// Don't retry on context errors
val, err := result.Unwrap(r)
_ = val
if err != nil && errors.Is(err, context.Canceled) {
return false
}
return result.IsLeft(r)
}
retrying := Retrying(policy, action, check)
// Create an already-cancelled context
ctx, cancel := context.WithCancel(t.Context())
cancel()
res := retrying(ctx)()
assert.True(t, result.IsLeft(res))
val, err := result.Unwrap(res)
_ = val
assert.True(t, errors.Is(err, context.Canceled))
// Should have attempted at most once
assert.LessOrEqual(t, attemptCount, 1)
}
// TestRetrying_ContextTimeoutInAction tests that actions respect context deadlines.
func TestRetrying_ContextTimeoutInAction(t *testing.T) {
policy := R.LimitRetries(10)
attemptCount := 0
action := func(status R.RetryStatus) ReaderIOResult[string] {
return func(ctx context.Context) IOResult[string] {
return func() Result[string] {
attemptCount++
// Check context before doing work
if ctx.Err() != nil {
return result.Left[string](ctx.Err())
}
// Simulate some work
time.Sleep(50 * time.Millisecond)
// Check context after work
if ctx.Err() != nil {
return result.Left[string](ctx.Err())
}
// Always fail to trigger retries
return result.Left[string](fmt.Errorf("attempt %d failed", status.IterNumber))
}
}
}
check := func(r Result[string]) bool {
// Don't retry on context errors
val, err := result.Unwrap(r)
_ = val
if err != nil && (errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded)) {
return false
}
return result.IsLeft(r)
}
retrying := Retrying(policy, action, check)
// Create a context with a short timeout
ctx, cancel := context.WithTimeout(t.Context(), 150*time.Millisecond)
defer cancel()
startTime := time.Now()
res := retrying(ctx)()
elapsed := time.Since(startTime)
assert.True(t, result.IsLeft(res))
// Should have stopped before completing all 10 retries
assert.Less(t, attemptCount, 10, "Should stop retrying when action detects context timeout")
// Should have stopped around the timeout duration
assert.Less(t, elapsed, 500*time.Millisecond, "Should stop soon after timeout")
}
// TestRetrying_CheckFunctionStopsRetry tests that the check function can
// stop retrying even when errors occur.
func TestRetrying_CheckFunctionStopsRetry(t *testing.T) {
policy := testRetryPolicy()
action := func(status R.RetryStatus) ReaderIOResult[string] {
return func(ctx context.Context) IOResult[string] {
return func() Result[string] {
if status.IterNumber == 0 {
return result.Left[string](fmt.Errorf("retryable error"))
}
return result.Left[string](fmt.Errorf("permanent error"))
}
}
}
// Only retry on "retryable error"
check := func(r Result[string]) bool {
return result.IsLeft(r) && result.Fold(
func(err error) bool { return err.Error() == "retryable error" },
func(string) bool { return false },
)(r)
}
retrying := Retrying(policy, action, check)
ctx := t.Context()
res := retrying(ctx)()
assert.Equal(t, result.Left[string](fmt.Errorf("permanent error")), res)
}
// TestRetrying_ExponentialBackoff tests that exponential backoff is applied.
func TestRetrying_ExponentialBackoff(t *testing.T) {
// Use a policy with measurable delays
policy := R.Monoid.Concat(
R.LimitRetries(3),
R.ExponentialBackoff(50*time.Millisecond),
)
startTime := time.Now()
action := func(status R.RetryStatus) ReaderIOResult[string] {
return func(ctx context.Context) IOResult[string] {
return func() Result[string] {
if status.IterNumber < 2 {
return result.Left[string](fmt.Errorf("retry"))
}
return result.Of("success")
}
}
}
check := func(r Result[string]) bool {
return result.IsLeft(r)
}
retrying := Retrying(policy, action, check)
ctx := t.Context()
res := retrying(ctx)()
elapsed := time.Since(startTime)
assert.Equal(t, result.Of("success"), res)
// With exponential backoff starting at 50ms:
// Iteration 0: no delay
// Iteration 1: 50ms delay
// Iteration 2: 100ms delay
// Total should be at least 150ms
assert.GreaterOrEqual(t, elapsed, 150*time.Millisecond)
}
// TestRetrying_ContextValuePropagation tests that context values are properly
// propagated through the retry mechanism.
func TestRetrying_ContextValuePropagation(t *testing.T) {
policy := R.LimitRetries(2)
type contextKey string
const requestIDKey contextKey = "requestID"
action := func(status R.RetryStatus) ReaderIOResult[string] {
return func(ctx context.Context) IOResult[string] {
return func() Result[string] {
// Extract value from context
requestID, ok := ctx.Value(requestIDKey).(string)
if !ok {
return result.Left[string](fmt.Errorf("missing request ID"))
}
if status.IterNumber < 1 {
return result.Left[string](fmt.Errorf("retry needed"))
}
return result.Of(fmt.Sprintf("processed request %s", requestID))
}
}
}
check := func(r Result[string]) bool {
return result.IsLeft(r)
}
retrying := Retrying(policy, action, check)
// Create context with a value
ctx := context.WithValue(t.Context(), requestIDKey, "12345")
res := retrying(ctx)()
assert.Equal(t, result.Of("processed request 12345"), res)
}
// TestRetrying_RetryStatusProgression tests that the RetryStatus is properly
// updated on each iteration.
func TestRetrying_RetryStatusProgression(t *testing.T) {
policy := testRetryPolicy()
var iterations []uint
action := func(status R.RetryStatus) ReaderIOResult[int] {
return func(ctx context.Context) IOResult[int] {
return func() Result[int] {
iterations = append(iterations, status.IterNumber)
if status.IterNumber < 3 {
return result.Left[int](fmt.Errorf("retry"))
}
return result.Of(int(status.IterNumber))
}
}
}
check := func(r Result[int]) bool {
return result.IsLeft(r)
}
retrying := Retrying(policy, action, check)
ctx := t.Context()
res := retrying(ctx)()
assert.Equal(t, result.Of(3), res)
// Should have attempted iterations 0, 1, 2, 3
assert.Equal(t, []uint{0, 1, 2, 3}, iterations)
}
// TestRetrying_ContextCancelledDuringDelay tests that the retry operation
// stops immediately when the context is cancelled during a retry delay,
// even if there are still retries remaining according to the policy.
func TestRetrying_ContextCancelledDuringDelay(t *testing.T) {
// Use a policy with significant delays to ensure we can cancel during the delay
policy := R.Monoid.Concat(
R.LimitRetries(10),
R.ConstantDelay(200*time.Millisecond),
)
attemptCount := 0
action := func(status R.RetryStatus) ReaderIOResult[string] {
return func(ctx context.Context) IOResult[string] {
return func() Result[string] {
attemptCount++
// Always fail to trigger retries
return result.Left[string](fmt.Errorf("attempt %d failed", status.IterNumber))
}
}
}
// Always retry on errors (don't check for context cancellation in check function)
check := func(r Result[string]) bool {
return result.IsLeft(r)
}
retrying := Retrying(policy, action, check)
// Create a context that we'll cancel during the retry delay
ctx, cancel := context.WithCancel(t.Context())
// Start the retry operation in a goroutine
resultChan := make(chan Result[string], 1)
startTime := time.Now()
go func() {
res := retrying(ctx)()
resultChan <- res
}()
// Wait for the first attempt to complete and the delay to start
time.Sleep(50 * time.Millisecond)
// Cancel the context during the retry delay
cancel()
// Wait for the result
res := <-resultChan
elapsed := time.Since(startTime)
// Should have stopped due to context cancellation
assert.True(t, result.IsLeft(res))
// Should have attempted only once or twice (not all 10 attempts)
// because the context was cancelled during the delay
assert.LessOrEqual(t, attemptCount, 2, "Should stop retrying when context is cancelled during delay")
// Should have stopped quickly after cancellation, not waiting for all delays
// With 10 retries and 200ms delays, it would take ~2 seconds without cancellation
// With cancellation during first delay, it should complete in well under 500ms
assert.Less(t, elapsed, 500*time.Millisecond, "Should stop immediately when context is cancelled during delay")
// When context is cancelled during the delay, the retry mechanism
// detects the cancellation and returns a context error
val, err := result.Unwrap(res)
_ = val
assert.Error(t, err)
// The error should be a context cancellation error since cancellation
// happened during the delay between retries
assert.True(t, errors.Is(err, context.Canceled), "Should return context.Canceled when cancelled during delay")
}

View File

@@ -18,6 +18,7 @@ package readerioresult
import (
"github.com/IBM/fp-go/v2/array"
"github.com/IBM/fp-go/v2/function"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/record"
)
@@ -34,7 +35,7 @@ func TraverseArray[A, B any](f Kleisli[A, B]) Kleisli[[]A, []B] {
Map[[]B, func(B) []B],
Ap[[]B, B],
f,
F.Flow2(f, WithContext),
)
}
@@ -78,7 +79,7 @@ func TraverseRecord[K comparable, A, B any](f Kleisli[A, B]) Kleisli[map[K]A, ma
Map[map[K]B, func(B) map[K]B],
Ap[map[K]B, B],
f,
F.Flow2(f, WithContext),
)
}
@@ -123,7 +124,7 @@ func MonadTraverseArraySeq[A, B any](as []A, f Kleisli[A, B]) ReaderIOResult[[]B
Map[[]B, func(B) []B],
ApSeq[[]B, B],
as,
f,
F.Flow2(f, WithContext),
)
}
@@ -139,7 +140,7 @@ func TraverseArraySeq[A, B any](f Kleisli[A, B]) Kleisli[[]A, []B] {
Of[[]B],
Map[[]B, func(B) []B],
ApSeq[[]B, B],
f,
F.Flow2(f, WithContext),
)
}
@@ -171,7 +172,7 @@ func MonadTraverseRecordSeq[K comparable, A, B any](as map[K]A, f Kleisli[A, B])
Map[map[K]B, func(B) map[K]B],
ApSeq[map[K]B, B],
as,
f,
F.Flow2(f, WithContext),
)
}
@@ -182,7 +183,7 @@ func TraverseRecordSeq[K comparable, A, B any](f Kleisli[A, B]) Kleisli[map[K]A,
Map[map[K]B, func(B) map[K]B],
ApSeq[map[K]B, B],
f,
F.Flow2(f, WithContext),
)
}
@@ -216,7 +217,7 @@ func MonadTraverseArrayPar[A, B any](as []A, f Kleisli[A, B]) ReaderIOResult[[]B
Map[[]B, func(B) []B],
ApPar[[]B, B],
as,
f,
F.Flow2(f, WithContext),
)
}
@@ -232,7 +233,7 @@ func TraverseArrayPar[A, B any](f Kleisli[A, B]) Kleisli[[]A, []B] {
Of[[]B],
Map[[]B, func(B) []B],
ApPar[[]B, B],
f,
F.Flow2(f, WithContext),
)
}
@@ -264,7 +265,7 @@ func TraverseRecordPar[K comparable, A, B any](f Kleisli[A, B]) Kleisli[map[K]A,
Map[map[K]B, func(B) map[K]B],
ApPar[map[K]B, B],
f,
F.Flow2(f, WithContext),
)
}
@@ -286,7 +287,7 @@ func MonadTraverseRecordPar[K comparable, A, B any](as map[K]A, f Kleisli[A, B])
Map[map[K]B, func(B) map[K]B],
ApPar[map[K]B, B],
as,
f,
F.Flow2(f, WithContext),
)
}

View File

@@ -18,12 +18,16 @@ package readerioresult
import (
"context"
"github.com/IBM/fp-go/v2/consumer"
"github.com/IBM/fp-go/v2/context/ioresult"
"github.com/IBM/fp-go/v2/context/readerresult"
"github.com/IBM/fp-go/v2/either"
"github.com/IBM/fp-go/v2/endomorphism"
"github.com/IBM/fp-go/v2/io"
"github.com/IBM/fp-go/v2/ioeither"
"github.com/IBM/fp-go/v2/lazy"
"github.com/IBM/fp-go/v2/optics/lens"
"github.com/IBM/fp-go/v2/optics/prism"
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/reader"
"github.com/IBM/fp-go/v2/readereither"
@@ -126,4 +130,11 @@ type (
ReaderResult[A any] = readerresult.ReaderResult[A]
ReaderEither[R, E, A any] = readereither.ReaderEither[R, E, A]
ReaderOption[R, A any] = readeroption.ReaderOption[R, A]
Endomorphism[A any] = endomorphism.Endomorphism[A]
Consumer[A any] = consumer.Consumer[A]
Prism[S, T any] = prism.Prism[S, T]
Lens[S, T any] = lens.Lens[S, T]
)

View File

@@ -15,11 +15,14 @@
package readerresult
import "github.com/IBM/fp-go/v2/readereither"
import (
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/readereither"
)
// TraverseArray transforms an array
func TraverseArray[A, B any](f Kleisli[A, B]) Kleisli[[]A, []B] {
return readereither.TraverseArray(f)
return readereither.TraverseArray(F.Flow2(f, WithContext))
}
// TraverseArrayWithIndex transforms an array

View File

@@ -17,7 +17,6 @@ package readerresult
import (
F "github.com/IBM/fp-go/v2/function"
L "github.com/IBM/fp-go/v2/optics/lens"
G "github.com/IBM/fp-go/v2/readereither/generic"
)
@@ -31,16 +30,26 @@ import (
// TenantID string
// }
// result := readereither.Do(State{})
//
//go:inline
func Do[S any](
empty S,
) ReaderResult[S] {
return G.Do[ReaderResult[S]](empty)
}
// Bind attaches the result of a computation to a context [S1] to produce a context [S2].
// Bind attaches the result of an EFFECTFUL computation to a context [S1] to produce a context [S2].
// This enables sequential composition where each step can depend on the results of previous steps
// and access the context.Context from the environment.
//
// IMPORTANT: Bind is for EFFECTFUL FUNCTIONS that depend on context.Context.
// The function parameter takes state and returns a ReaderResult[T], which is effectful because
// it depends on context.Context (can be cancelled, has deadlines, carries values).
//
// For PURE FUNCTIONS (side-effect free), use:
// - BindResultK: For pure functions with errors (State -> (Value, error))
// - Let: For pure functions without errors (State -> Value)
//
// The setter function takes the result of the computation and returns a function that
// updates the context from S1 to S2.
//
@@ -78,14 +87,27 @@ func Do[S any](
// },
// ),
// )
//
//go:inline
func Bind[S1, S2, T any](
setter func(T) func(S1) S2,
f Kleisli[S1, T],
) Kleisli[ReaderResult[S1], S2] {
return G.Bind[ReaderResult[S1], ReaderResult[S2]](setter, f)
return G.Bind[ReaderResult[S1], ReaderResult[S2]](setter, F.Flow2(f, WithContext))
}
// Let attaches the result of a computation to a context [S1] to produce a context [S2]
// Let attaches the result of a PURE computation to a context [S1] to produce a context [S2].
//
// IMPORTANT: Let is for PURE FUNCTIONS (side-effect free) that don't depend on context.Context.
// The function parameter takes state and returns a value directly, with no errors or effects.
//
// For EFFECTFUL FUNCTIONS (that need context.Context), use:
// - Bind: For effectful ReaderResult computations (State -> ReaderResult[Value])
//
// For PURE FUNCTIONS with error handling, use:
// - BindResultK: For pure functions with errors (State -> (Value, error))
//
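// Example (a minimal sketch; the State struct and its fields are illustrative).
// The setter writes the derived value back into the state; the second argument is the pure derivation:
//
//  type State struct{ First, Last, Full string }
//
//  // derive Full from the existing fields, purely and without touching the context
//  step := Let(
//      func(full string) func(State) State {
//          return func(s State) State { s.Full = full; return s }
//      },
//      func(s State) string { return s.First + " " + s.Last },
//  )
//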
//go:inline
func Let[S1, S2, T any](
setter func(T) func(S1) S2,
f func(S1) T,
@@ -93,7 +115,10 @@ func Let[S1, S2, T any](
return G.Let[ReaderResult[S1], ReaderResult[S2]](setter, f)
}
// LetTo attaches the a value to a context [S1] to produce a context [S2]
// LetTo attaches a constant value to a context [S1] to produce a context [S2].
// This is a PURE operation (side-effect free) that simply sets a field to a constant value.
//
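// Example (a minimal sketch; the Config struct is illustrative):
//
//  type Config struct{ Debug bool }
//
//  // unconditionally switch off the Debug flag
//  step := LetTo(
//      func(debug bool) func(c Config) Config {
//          return func(c Config) Config { c.Debug = debug; return c }
//      },
//      false,
//  )
//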
//go:inline
func LetTo[S1, S2, T any](
setter func(T) func(S1) S2,
b T,
@@ -102,15 +127,27 @@ func LetTo[S1, S2, T any](
}
// BindTo initializes a new state [S1] from a value [T]
//
//go:inline
func BindTo[S1, T any](
setter func(T) S1,
) Kleisli[ReaderResult[T], S1] {
) Operator[T, S1] {
return G.BindTo[ReaderResult[S1], ReaderResult[T]](setter)
}
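// BindToP initializes a new state [S1] from a value [T] using the ReverseGet
// function of a Prism. It is a convenience wrapper around BindTo for states that
// already expose a Prism onto the relevant constructor.
//
// Example (a minimal sketch; namePrism is an illustrative Prism[State, string]):
//
//  step := BindToP(namePrism)
//  // equivalent to BindTo(namePrism.ReverseGet)
//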
//go:inline
func BindToP[S1, T any](
setter Prism[S1, T],
) Operator[T, S1] {
return BindTo(setter.ReverseGet)
}
// ApS attaches a value to a context [S1] to produce a context [S2] by considering
// the context and the value concurrently (using Applicative rather than Monad).
// This allows independent computations to be combined without one depending on the result of the other.
// This allows independent EFFECTFUL computations to be combined without one depending on the result of the other.
//
// IMPORTANT: ApS is for EFFECTFUL FUNCTIONS that depend on context.Context.
// The ReaderResult parameter is effectful because it depends on context.Context.
//
// Unlike Bind, which sequences operations, ApS can be used when operations are independent
// and can conceptually run in parallel.
@@ -145,6 +182,8 @@ func BindTo[S1, T any](
// getTenantID,
// ),
// )
//
//go:inline
func ApS[S1, S2, T any](
setter func(T) func(S1) S2,
fa ReaderResult[T],
@@ -183,17 +222,24 @@ func ApS[S1, S2, T any](
// readereither.Do(Person{Name: "Alice", Age: 25}),
// readereither.ApSL(ageLens, getAge),
// )
//
//go:inline
func ApSL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
fa ReaderResult[T],
) Kleisli[ReaderResult[S], S] {
return ApS(lens.Set, fa)
}
// BindL is a variant of Bind that uses a lens to focus on a specific field in the state.
// It combines the lens-based field access with monadic composition, allowing you to:
// It combines the lens-based field access with monadic composition for EFFECTFUL computations.
//
// IMPORTANT: BindL is for EFFECTFUL FUNCTIONS that depend on context.Context.
// The function parameter returns a ReaderResult, which is effectful.
//
// It allows you to:
// 1. Extract a field value using the lens
// 2. Use that value in a computation that may fail
// 2. Use that value in an effectful computation that may fail
// 3. Update the field with the result
//
// Parameters:
@@ -227,15 +273,20 @@ func ApSL[S, T any](
// readereither.Of[error](Counter{Value: 42}),
// readereither.BindL(valueLens, increment),
// )
//
//go:inline
func BindL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
f Kleisli[T, T],
) Kleisli[ReaderResult[S], S] {
return Bind(lens.Set, F.Flow2(lens.Get, f))
return Bind(lens.Set, F.Flow2(lens.Get, F.Flow2(f, WithContext)))
}
// LetL is a variant of Let that uses a lens to focus on a specific field in the state.
// It applies a pure transformation to the focused field without any effects.
// It applies a PURE transformation to the focused field without any effects.
//
// IMPORTANT: LetL is for PURE FUNCTIONS (side-effect free) that don't depend on context.Context.
// The function parameter is a pure endomorphism (T -> T) with no errors or effects.
//
// Parameters:
// - lens: A lens that focuses on a field of type T within state S
@@ -262,15 +313,17 @@ func BindL[S, T any](
// readereither.LetL(valueLens, double),
// )
// // result when executed will be Right(Counter{Value: 42})
//
//go:inline
func LetL[S, T any](
lens L.Lens[S, T],
f func(T) T,
lens Lens[S, T],
f Endomorphism[T],
) Kleisli[ReaderResult[S], S] {
return Let(lens.Set, F.Flow2(lens.Get, f))
}
// LetToL is a variant of LetTo that uses a lens to focus on a specific field in the state.
// It sets the focused field to a constant value.
// It sets the focused field to a constant value. This is a PURE operation (side-effect free).
//
// Parameters:
// - lens: A lens that focuses on a field of type T within state S
@@ -296,8 +349,10 @@ func LetL[S, T any](
// readereither.LetToL(debugLens, false),
// )
// // result when executed will be Right(Config{Debug: false, Timeout: 30})
//
//go:inline
func LetToL[S, T any](
lens L.Lens[S, T],
lens Lens[S, T],
b T,
) Kleisli[ReaderResult[S], S] {
return LetTo(lens.Set, b)

View File

@@ -19,14 +19,23 @@ import (
"context"
E "github.com/IBM/fp-go/v2/either"
F "github.com/IBM/fp-go/v2/function"
)
// WithContext wraps an existing ReaderResult and performs a cancellation check on the context before delegating to the wrapped computation.
func WithContext[A any](ma ReaderResult[A]) ReaderResult[A] {
return func(ctx context.Context) E.Either[error, A] {
if err := context.Cause(ctx); err != nil {
return E.Left[A](err)
if ctx.Err() != nil {
return E.Left[A](context.Cause(ctx))
}
return ma(ctx)
}
}
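// WithContextK lifts WithContext to Kleisli arrows: the returned arrow invokes f and
// wraps the resulting ReaderResult with WithContext, so the cancellation check runs
// before the wrapped computation is evaluated.
//
// Example (a minimal sketch; fetchUser is an illustrative Kleisli[int, User]):
//
//  safeFetchUser := WithContextK(fetchUser)
//  // safeFetchUser(42)(ctx) short-circuits with the cancellation cause if ctx is already done
//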
//go:inline
func WithContextK[A, B any](f Kleisli[A, B]) Kleisli[A, B] {
return F.Flow2(
f,
WithContext,
)
}

View File

@@ -0,0 +1,154 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerresult
import (
"context"
"github.com/IBM/fp-go/v2/reader"
RR "github.com/IBM/fp-go/v2/readerresult"
)
// SequenceReader swaps the order of environment parameters when the inner computation is a Reader.
//
// This function is specialized for the context.Context-based ReaderResult monad. It takes a
// ReaderResult that produces a Reader and returns a reader.Kleisli that produces Results.
// The context.Context is implicitly used as the outer environment type.
//
// Type Parameters:
// - R: The inner environment type (becomes outer after flip)
// - A: The success value type
//
// Parameters:
// - ma: A ReaderResult that takes context.Context and may produce a Reader[R, A]
//
// Returns:
// - A reader.Kleisli[context.Context, R, Result[A]], which is func(context.Context) func(R) Result[A]
//
// The function preserves error handling from the outer ReaderResult layer. If the outer
// computation fails, the error is propagated to the inner Result.
//
// Note: This is an inline wrapper around readerresult.SequenceReader, specialized for
// context.Context as the outer environment type.
//
// Example:
//
// type Database struct {
// ConnectionString string
// }
//
// // Original: takes context, may fail, produces Reader[Database, string]
// original := func(ctx context.Context) result.Result[reader.Reader[Database, string]] {
// if ctx.Err() != nil {
// return result.Error[reader.Reader[Database, string]](ctx.Err())
// }
// return result.Ok[error](func(db Database) string {
// return fmt.Sprintf("Query on %s", db.ConnectionString)
// })
// }
//
// // Sequenced: takes context first, then Database
// sequenced := SequenceReader(original)
//
// ctx := context.Background()
// db := Database{ConnectionString: "localhost:5432"}
//
// // Apply context first to get a function that takes database
// dbReader := sequenced(ctx)
// // Then apply database to get the final result
// result := dbReader(db)
// // result is Result[string]
//
// Use Cases:
// - Dependency injection: Flip parameter order to inject context first, then dependencies
// - Testing: Separate context handling from business logic for easier testing
// - Composition: Enable point-free style by fixing the context parameter first
//
//go:inline
func SequenceReader[R, A any](ma ReaderResult[Reader[R, A]]) reader.Kleisli[context.Context, R, Result[A]] {
return RR.SequenceReader(ma)
}
// TraverseReader transforms a value using a Reader function and swaps environment parameter order.
//
// This function combines mapping and parameter flipping in a single operation. It takes a
// Reader function (pure computation without error handling) and returns a function that:
// 1. Maps a ReaderResult[A] to ReaderResult[B] using the provided Reader function
// 2. Flips the parameter order so R comes before context.Context
//
// Type Parameters:
// - R: The inner environment type (becomes outer after flip)
// - A: The input value type
// - B: The output value type
//
// Parameters:
// - f: A reader.Kleisli[R, A, B], which is func(R) func(A) B - a pure Reader function
//
// Returns:
// - A function that takes ReaderResult[A] and returns Kleisli[R, B]
// - Kleisli[R, B] is func(R) ReaderResult[B], which is func(R) func(context.Context) Result[B]
//
// The function preserves error handling from the input ReaderResult. If the input computation
// fails, the error is propagated without applying the transformation function.
//
// Note: This is a wrapper around readerresult.TraverseReader, specialized for context.Context.
//
// Example:
//
// type Config struct {
// MaxRetries int
// }
//
// // A pure Reader function that depends on Config
// formatMessage := func(cfg Config) func(int) string {
// return func(value int) string {
// return fmt.Sprintf("Value: %d, MaxRetries: %d", value, cfg.MaxRetries)
// }
// }
//
// // Original computation that may fail
// computation := func(ctx context.Context) result.Result[int] {
// if ctx.Err() != nil {
// return result.Error[int](ctx.Err())
// }
// return result.Ok[error](42)
// }
//
// // Create a traversal that applies formatMessage and flips parameters
// traverse := TraverseReader[Config, int, string](formatMessage)
//
// // Apply to the computation
// flipped := traverse(computation)
//
// // Now we can provide Config first, then context
// cfg := Config{MaxRetries: 3}
// ctx := context.Background()
//
// result := flipped(cfg)(ctx)
// // result is Result[string] containing "Value: 42, MaxRetries: 3"
//
// Use Cases:
// - Dependency injection: Inject configuration/dependencies before context
// - Testing: Separate pure business logic from context handling
// - Composition: Build pipelines where dependencies are fixed before execution
// - Point-free style: Enable partial application by fixing dependencies first
//
//go:inline
func TraverseReader[R, A, B any](
f reader.Kleisli[R, A, B],
) func(ReaderResult[A]) Kleisli[R, B] {
return RR.TraverseReader[context.Context](f)
}

View File

@@ -0,0 +1,215 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package readerresult provides logging utilities for the ReaderResult monad,
// which combines the Reader monad (for dependency injection via context.Context)
// with the Result monad (for error handling).
//
// The logging functions in this package allow you to log Result values (both
// successes and errors) while preserving the functional composition style.
package readerresult
import (
"context"
"log/slog"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/logging"
"github.com/IBM/fp-go/v2/reader"
"github.com/IBM/fp-go/v2/result"
)
// curriedLog creates a curried logging function that takes an slog.Attr and a context,
// then logs the attribute with the specified log level and message.
//
// This is an internal helper function used to create the logging pipeline in a
// point-free style. The currying allows for partial application in functional
// composition.
//
// Parameters:
// - logLevel: The slog.Level at which to log (e.g., LevelInfo, LevelError)
// - cb: A callback function that retrieves a logger from the context
// - message: The log message to display
//
// Returns:
// - A curried function that takes an slog.Attr, then a context, and performs logging
func curriedLog(
logLevel slog.Level,
cb func(context.Context) *slog.Logger,
message string) func(slog.Attr) Reader[context.Context, struct{}] {
return F.Curry2(func(a slog.Attr, ctx context.Context) struct{} {
cb(ctx).LogAttrs(ctx, logLevel, message, a)
return struct{}{}
})
}
// SLogWithCallback creates a Kleisli arrow that logs a Result value using a custom
// logger callback and log level. The Result value is logged and then returned unchanged,
// making this function suitable for use in functional pipelines.
//
// This function logs both successful values and errors:
// - Success values are logged with the key "value"
// - Error values are logged with the key "error"
//
// The logging is performed as a side effect while preserving the Result value,
// allowing it to be used in the middle of a computation pipeline without
// interrupting the flow.
//
// Type Parameters:
// - A: The type of the success value in the Result
//
// Parameters:
// - logLevel: The slog.Level at which to log (e.g., LevelInfo, LevelDebug, LevelError)
// - cb: A callback function that retrieves a *slog.Logger from the context
// - message: The log message to display
//
// Returns:
// - A Kleisli arrow that takes a Result[A] and returns a ReaderResult[A]
// The returned ReaderResult, when executed with a context, logs the Result
// and returns it unchanged
//
// Example:
//
// type User struct {
// ID int
// Name string
// }
//
// // Custom logger callback
// getLogger := func(ctx context.Context) *slog.Logger {
// return slog.Default()
// }
//
// // Create a logging function for debug level
// logDebug := SLogWithCallback[User](slog.LevelDebug, getLogger, "User data")
//
// // Use in a pipeline
// ctx := context.Background()
// user := result.Of(User{ID: 123, Name: "Alice"})
// logged := logDebug(user)(ctx) // Logs: level=DEBUG msg="User data" value={ID:123 Name:Alice}
// // logged still contains the User value
//
// Example with error:
//
// err := errors.New("user not found")
// userResult := result.Left[User](err)
// logged := logDebug(userResult)(ctx) // Logs: level=DEBUG msg="User data" error="user not found"
// // logged still contains the error
func SLogWithCallback[A any](
logLevel slog.Level,
cb func(context.Context) *slog.Logger,
message string) Kleisli[Result[A], A] {
return F.Pipe1(
F.Flow2(
result.ToSLogAttr[A](),
curriedLog(logLevel, cb, message),
),
reader.Chain(reader.Sequence(F.Flow2( // this flow is basically the `MapTo` function with side effects
reader.Of[struct{}, Result[A]],
reader.Map[context.Context, struct{}, Result[A]],
))),
)
}
// SLog creates a Kleisli arrow that logs a Result value at INFO level using the
// logger from the context. This is a convenience function that uses SLogWithCallback
// with default settings.
//
// The Result value is logged and then returned unchanged, making this function
// suitable for use in functional pipelines for debugging or monitoring purposes.
//
// This function logs both successful values and errors:
// - Success values are logged with the key "value"
// - Error values are logged with the key "error"
//
// Type Parameters:
// - A: The type of the success value in the Result
//
// Parameters:
// - message: The log message to display
//
// Returns:
// - A Kleisli arrow that takes a Result[A] and returns a ReaderResult[A]
// The returned ReaderResult, when executed with a context, logs the Result
// at INFO level and returns it unchanged
//
// Example - Logging a successful computation:
//
// ctx := context.Background()
//
// // Simple value logging
// res := result.Of(42)
// logged := SLog[int]("Processing number")(res)(ctx)
// // Logs: level=INFO msg="Processing number" value=42
// // logged == result.Of(42)
//
// Example - Logging in a pipeline:
//
// type User struct {
// ID int
// Name string
// }
//
// fetchUser := func(id int) result.Result[User] {
// return result.Of(User{ID: id, Name: "Alice"})
// }
//
// processUser := func(user User) result.Result[string] {
// return result.Of(fmt.Sprintf("Processed: %s", user.Name))
// }
//
// ctx := context.Background()
//
// // Log at each step
// userResult := fetchUser(123)
// logged1 := SLog[User]("Fetched user")(userResult)(ctx)
// // Logs: level=INFO msg="Fetched user" value={ID:123 Name:Alice}
//
// processed := result.Chain(processUser)(logged1)
// logged2 := SLog[string]("Processed user")(processed)(ctx)
// // Logs: level=INFO msg="Processed user" value="Processed: Alice"
//
// Example - Logging errors:
//
// err := errors.New("database connection failed")
// errResult := result.Left[User](err)
// logged := SLog[User]("Database operation")(errResult)(ctx)
// // Logs: level=INFO msg="Database operation" error="database connection failed"
// // logged still contains the error
//
// Example - Using with context logger:
//
// // Set up a custom logger in the context
// logger := slog.New(slog.NewJSONHandler(os.Stdout, nil))
// ctx := logging.WithLogger(logger)(context.Background())
//
// res := result.Of("important data")
// logged := SLog[string]("Critical operation")(res)(ctx)
// // Uses the logger from context to log the message
//
// Note: The function uses logging.GetLoggerFromContext to retrieve the logger,
// which falls back to the global logger if no logger is found in the context.
//
//go:inline
func SLog[A any](message string) Kleisli[Result[A], A] {
return SLogWithCallback[A](slog.LevelInfo, logging.GetLoggerFromContext, message)
}
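// TapSLog logs the Result produced by a ReaderResult computation at INFO level as a
// side effect and returns the computation unchanged. It is the Operator counterpart
// of SLog, obtained by chaining SLog into the surrounding ReaderResult.
//
// Example (a minimal sketch; loadConfig is an illustrative ReaderResult[Config]):
//
//  logged := F.Pipe1(
//      loadConfig,
//      TapSLog[Config]("Loaded configuration"),
//  )
//  // executing logged(ctx) logs the Result and yields it unchanged
//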
//go:inline
func TapSLog[A any](message string) Operator[A, A] {
return reader.Chain(SLog[A](message))
}

View File

@@ -0,0 +1,302 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerresult
import (
"bytes"
"context"
"errors"
"log/slog"
"testing"
"github.com/IBM/fp-go/v2/logging"
N "github.com/IBM/fp-go/v2/number"
"github.com/IBM/fp-go/v2/result"
"github.com/stretchr/testify/assert"
)
// TestSLogLogsSuccessValue tests that SLog logs successful Result values
func TestSLogLogsSuccessValue(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
ctx := context.Background()
// Create a Result and log it
res1 := result.Of(42)
logged := SLog[int]("Result value")(res1)(ctx)
assert.Equal(t, result.Of(42), logged)
logOutput := buf.String()
assert.Contains(t, logOutput, "Result value")
assert.Contains(t, logOutput, "value=42")
}
// TestSLogLogsErrorValue tests that SLog logs error Result values
func TestSLogLogsErrorValue(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
ctx := context.Background()
testErr := errors.New("test error")
// Create an error Result and log it
res1 := result.Left[int](testErr)
logged := SLog[int]("Result value")(res1)(ctx)
assert.Equal(t, res1, logged)
logOutput := buf.String()
assert.Contains(t, logOutput, "Result value")
assert.Contains(t, logOutput, "error")
assert.Contains(t, logOutput, "test error")
}
// TestSLogInPipeline tests SLog in a functional pipeline
func TestSLogInPipeline(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
ctx := context.Background()
// SLog takes a Result[A] and returns ReaderResult[A]
// So we need to start with a Result, apply SLog, then execute with context
res1 := result.Of(10)
logged := SLog[int]("Initial value")(res1)(ctx)
assert.Equal(t, result.Of(10), logged)
logOutput := buf.String()
assert.Contains(t, logOutput, "Initial value")
assert.Contains(t, logOutput, "value=10")
}
// TestSLogWithContextLogger tests SLog using logger from context
func TestSLogWithContextLogger(t *testing.T) {
var buf bytes.Buffer
contextLogger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
ctx := logging.WithLogger(contextLogger)(context.Background())
res1 := result.Of("test value")
logged := SLog[string]("Context logger test")(res1)(ctx)
assert.Equal(t, result.Of("test value"), logged)
logOutput := buf.String()
assert.Contains(t, logOutput, "Context logger test")
assert.Contains(t, logOutput, `value="test value"`)
}
// TestSLogDisabled tests that SLog respects logger level
func TestSLogDisabled(t *testing.T) {
var buf bytes.Buffer
// Create logger with level that disables info logs
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelError, // Only log errors
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
ctx := context.Background()
res1 := result.Of(42)
logged := SLog[int]("This should not be logged")(res1)(ctx)
assert.Equal(t, result.Of(42), logged)
// Should have no logs since level is ERROR
logOutput := buf.String()
assert.Empty(t, logOutput, "Should have no logs when logging is disabled")
}
// TestSLogWithStruct tests SLog with structured data
func TestSLogWithStruct(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
type User struct {
ID int
Name string
}
ctx := context.Background()
user := User{ID: 123, Name: "Alice"}
res1 := result.Of(user)
logged := SLog[User]("User data")(res1)(ctx)
assert.Equal(t, result.Of(user), logged)
logOutput := buf.String()
assert.Contains(t, logOutput, "User data")
assert.Contains(t, logOutput, "ID:123")
assert.Contains(t, logOutput, "Name:Alice")
}
// TestSLogWithCallbackCustomLevel tests SLogWithCallback with custom log level
func TestSLogWithCallbackCustomLevel(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelDebug,
}))
customCallback := func(ctx context.Context) *slog.Logger {
return logger
}
ctx := context.Background()
// Create a Result and log it with custom callback
res1 := result.Of(42)
logged := SLogWithCallback[int](slog.LevelDebug, customCallback, "Debug result")(res1)(ctx)
assert.Equal(t, result.Of(42), logged)
logOutput := buf.String()
assert.Contains(t, logOutput, "Debug result")
assert.Contains(t, logOutput, "value=42")
assert.Contains(t, logOutput, "level=DEBUG")
}
// TestSLogWithCallbackLogsError tests SLogWithCallback logs errors
func TestSLogWithCallbackLogsError(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelWarn,
}))
customCallback := func(ctx context.Context) *slog.Logger {
return logger
}
ctx := context.Background()
testErr := errors.New("warning error")
// Create an error Result and log it with custom callback
res1 := result.Left[int](testErr)
logged := SLogWithCallback[int](slog.LevelWarn, customCallback, "Warning result")(res1)(ctx)
assert.Equal(t, res1, logged)
logOutput := buf.String()
assert.Contains(t, logOutput, "Warning result")
assert.Contains(t, logOutput, "error")
assert.Contains(t, logOutput, "warning error")
assert.Contains(t, logOutput, "level=WARN")
}
// TestSLogChainedOperations tests SLog in chained operations
func TestSLogChainedOperations(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
ctx := context.Background()
// First log step 1
res1 := result.Of(5)
logged1 := SLog[int]("Step 1")(res1)(ctx)
// Then log step 2 with doubled value
res2 := result.Map(N.Mul(2))(logged1)
logged2 := SLog[int]("Step 2")(res2)(ctx)
assert.Equal(t, result.Of(10), logged2)
logOutput := buf.String()
assert.Contains(t, logOutput, "Step 1")
assert.Contains(t, logOutput, "value=5")
assert.Contains(t, logOutput, "Step 2")
assert.Contains(t, logOutput, "value=10")
}
// TestSLogPreservesError tests that SLog preserves error through the pipeline
func TestSLogPreservesError(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
ctx := context.Background()
testErr := errors.New("original error")
res1 := result.Left[int](testErr)
logged := SLog[int]("Logging error")(res1)(ctx)
// Apply map to verify error is preserved
res2 := result.Map(N.Mul(2))(logged)
assert.Equal(t, res1, res2)
logOutput := buf.String()
assert.Contains(t, logOutput, "Logging error")
assert.Contains(t, logOutput, "original error")
}
// TestSLogMultipleValues tests logging multiple different values
func TestSLogMultipleValues(t *testing.T) {
var buf bytes.Buffer
logger := slog.New(slog.NewTextHandler(&buf, &slog.HandlerOptions{
Level: slog.LevelInfo,
}))
oldLogger := logging.SetLogger(logger)
defer logging.SetLogger(oldLogger)
ctx := context.Background()
// Test with different types
intRes := SLog[int]("Integer")(result.Of(42))(ctx)
assert.Equal(t, result.Of(42), intRes)
strRes := SLog[string]("String")(result.Of("hello"))(ctx)
assert.Equal(t, result.Of("hello"), strRes)
boolRes := SLog[bool]("Boolean")(result.Of(true))(ctx)
assert.Equal(t, result.Of(true), boolRes)
logOutput := buf.String()
assert.Contains(t, logOutput, "Integer")
assert.Contains(t, logOutput, "value=42")
assert.Contains(t, logOutput, "String")
assert.Contains(t, logOutput, "value=hello")
assert.Contains(t, logOutput, "Boolean")
assert.Contains(t, logOutput, "value=true")
}

View File

@@ -18,9 +18,17 @@ package readerresult
import (
"context"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/chain"
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/reader"
"github.com/IBM/fp-go/v2/readereither"
)
func FromReader[A any](r Reader[context.Context, A]) ReaderResult[A] {
return readereither.FromReader[error](r)
}
func FromEither[A any](e Either[A]) ReaderResult[A] {
return readereither.FromEither[context.Context](e)
}
@@ -42,11 +50,11 @@ func Map[A, B any](f func(A) B) Operator[A, B] {
}
func MonadChain[A, B any](ma ReaderResult[A], f Kleisli[A, B]) ReaderResult[B] {
return readereither.MonadChain(ma, f)
return readereither.MonadChain(ma, F.Flow2(f, WithContext))
}
func Chain[A, B any](f Kleisli[A, B]) Operator[A, B] {
return readereither.Chain(f)
return readereither.Chain(F.Flow2(f, WithContext))
}
func Of[A any](a A) ReaderResult[A] {
@@ -66,7 +74,7 @@ func FromPredicate[A any](pred func(A) bool, onFalse func(A) error) Kleisli[A, A
}
func OrElse[A any](onLeft Kleisli[error, A]) Kleisli[ReaderResult[A], A] {
return readereither.OrElse(onLeft)
return readereither.OrElse(F.Flow2(onLeft, WithContext))
}
func Ask() ReaderResult[context.Context] {
@@ -81,7 +89,7 @@ func ChainEitherK[A, B any](f func(A) Either[B]) func(ma ReaderResult[A]) Reader
return readereither.ChainEitherK[context.Context](f)
}
func ChainOptionK[A, B any](onNone func() error) func(func(A) Option[B]) Operator[A, B] {
func ChainOptionK[A, B any](onNone func() error) func(option.Kleisli[A, B]) Operator[A, B] {
return readereither.ChainOptionK[context.Context, A, B](onNone)
}
@@ -97,3 +105,197 @@ func Flap[B, A any](a A) Operator[func(A) B, B] {
func Read[A any](r context.Context) func(ReaderResult[A]) Result[A] {
return readereither.Read[error, A](r)
}
// MonadMapTo executes a ReaderResult computation, discards its success value, and returns a constant value.
// This is the monadic version that takes both the ReaderResult and the constant value as parameters.
//
// IMPORTANT: ReaderResult represents a side-effectful computation because it depends on context.Context,
// which is effectful (can be cancelled, has deadlines, carries values). For this reason, MonadMapTo WILL
// execute the original ReaderResult to allow any side effects to occur, then discard the success result
// and return the constant value. If the original computation fails, the error is preserved.
//
// Type Parameters:
// - A: The success type of the first ReaderResult (will be discarded if successful)
// - B: The type of the constant value to return on success
//
// Parameters:
// - ma: The ReaderResult to execute (side effects will occur, success value discarded)
// - b: The constant value to return if ma succeeds
//
// Returns:
// - A ReaderResult that executes ma, preserves errors, but replaces success values with b
//
// Example:
//
// type Config struct { Counter int }
// increment := func(ctx context.Context) result.Result[int] {
// // Side effect: log the operation
// fmt.Println("incrementing")
// return result.Of(5)
// }
// r := readerresult.MonadMapTo(increment, "done")
// result := r(context.Background()) // Prints "incrementing", returns Right("done")
//
//go:inline
func MonadMapTo[A, B any](ma ReaderResult[A], b B) ReaderResult[B] {
return MonadMap(ma, reader.Of[A](b))
}
// MapTo creates an operator that executes a ReaderResult computation, discards its success value,
// and returns a constant value. This is the curried version where the constant value is provided first,
// returning a function that can be applied to any ReaderResult.
//
// IMPORTANT: ReaderResult represents a side-effectful computation because it depends on context.Context,
// which is effectful (can be cancelled, has deadlines, carries values). For this reason, MapTo WILL
// execute the input ReaderResult to allow any side effects to occur, then discard the success result
// and return the constant value. If the computation fails, the error is preserved.
//
// Type Parameters:
// - A: The success type of the input ReaderResult (will be discarded if successful)
// - B: The type of the constant value to return on success
//
// Parameters:
// - b: The constant value to return on success
//
// Returns:
// - An Operator that executes a ReaderResult[A], preserves errors, but replaces success with b
//
// Example:
//
// logStep := func(ctx context.Context) result.Result[int] {
// fmt.Println("step executed")
// return result.Of(42)
// }
// toDone := readerresult.MapTo[int, string]("done")
// pipeline := toDone(logStep)
// result := pipeline(context.Background()) // Prints "step executed", returns Right("done")
//
// Example - In a functional pipeline:
//
// step1 := func(ctx context.Context) result.Result[int] {
// fmt.Println("processing")
// return result.Of(1)
// }
// pipeline := F.Pipe1(
// step1,
// readerresult.MapTo[int, string]("complete"),
// )
// output := pipeline(context.Background()) // Prints "processing", returns Right("complete")
//
//go:inline
func MapTo[A, B any](b B) Operator[A, B] {
return Map(reader.Of[A](b))
}
// MonadChainTo sequences two ReaderResult computations where the second ignores the first's success value.
// This is the monadic version that takes both ReaderResults as parameters.
//
// IMPORTANT: ReaderResult represents a side-effectful computation because it depends on context.Context,
// which is effectful (can be cancelled, has deadlines, carries values). For this reason, MonadChainTo WILL
// execute the first ReaderResult to allow any side effects to occur, then discard the success result and
// execute the second ReaderResult with the same context. If the first computation fails, the error is
// returned immediately without executing the second computation.
//
// Type Parameters:
// - A: The success type of the first ReaderResult (will be discarded if successful)
// - B: The success type of the second ReaderResult
//
// Parameters:
// - ma: The first ReaderResult to execute (side effects will occur, success value discarded)
// - b: The second ReaderResult to execute if ma succeeds
//
// Returns:
// - A ReaderResult that executes ma, then b if ma succeeds, returning b's result
//
// Example:
//
// logStart := func(ctx context.Context) result.Result[int] {
// fmt.Println("starting")
// return result.Of(1)
// }
// logEnd := func(ctx context.Context) result.Result[string] {
// fmt.Println("ending")
// return result.Of("done")
// }
// r := readerresult.MonadChainTo(logStart, logEnd)
// result := r(context.Background()) // Prints "starting" then "ending", returns Right("done")
//
//go:inline
func MonadChainTo[A, B any](ma ReaderResult[A], b ReaderResult[B]) ReaderResult[B] {
return MonadChain(ma, reader.Of[A](b))
}
// ChainTo creates an operator that sequences two ReaderResult computations where the second ignores
// the first's success value. This is the curried version where the second ReaderResult is provided first,
// returning a function that can be applied to any first ReaderResult.
//
// IMPORTANT: ReaderResult represents a side-effectful computation because it depends on context.Context,
// which is effectful (can be cancelled, has deadlines, carries values). For this reason, ChainTo WILL
// execute the first ReaderResult to allow any side effects to occur, then discard the success result and
// execute the second ReaderResult with the same context. If the first computation fails, the error is
// returned immediately without executing the second computation.
//
// Type Parameters:
// - A: The success type of the first ReaderResult (will be discarded if successful)
// - B: The success type of the second ReaderResult
//
// Parameters:
// - b: The second ReaderResult to execute after the first succeeds
//
// Returns:
// - An Operator that executes the first ReaderResult, then b if successful
//
// Example:
//
// logEnd := func(ctx context.Context) result.Result[string] {
// fmt.Println("ending")
// return result.Of("done")
// }
// thenLogEnd := readerresult.ChainTo[int, string](logEnd)
//
// logStart := func(ctx context.Context) result.Result[int] {
// fmt.Println("starting")
// return result.Of(1)
// }
// pipeline := thenLogEnd(logStart)
// result := pipeline(context.Background()) // Prints "starting" then "ending", returns Right("done")
//
// Example - In a functional pipeline:
//
// step1 := func(ctx context.Context) result.Result[int] {
// fmt.Println("step 1")
// return result.Of(1)
// }
// step2 := func(ctx context.Context) result.Result[string] {
// fmt.Println("step 2")
// return result.Of("complete")
// }
// pipeline := F.Pipe1(
// step1,
// readerresult.ChainTo[int, string](step2),
// )
// output := pipeline(context.Background()) // Prints "step 1" then "step 2", returns Right("complete")
//
//go:inline
func ChainTo[A, B any](b ReaderResult[B]) Operator[A, B] {
return Chain(reader.Of[A](b))
}
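// MonadChainFirst runs ma and then runs f on its success value purely for its
// effects: the success value of ma is kept and the success value of f is
// discarded. Errors from either computation are propagated.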
//go:inline
func MonadChainFirst[A, B any](ma ReaderResult[A], f Kleisli[A, B]) ReaderResult[A] {
return chain.MonadChainFirst(
MonadChain,
MonadMap,
ma,
F.Flow2(f, WithContext),
)
}
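// ChainFirst is the curried form of MonadChainFirst: it runs f on the success
// value for its effects and keeps the original value. Errors from either
// computation are propagated.
//
// Example (a minimal sketch; the audit step below is hypothetical):
//
// audit := func(n int) ReaderResult[string] {
// return func(ctx context.Context) Result[string] {
// fmt.Println("seen", n) // side effect only
// return result.Of("ok")
// }
// }
//
// pipeline := F.Pipe1(
// Of(42),
// ChainFirst(audit),
// )
// res := pipeline(context.Background()) // Prints "seen 42", returns Right(42)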
//go:inline
func ChainFirst[A, B any](f Kleisli[A, B]) Operator[A, A] {
return chain.ChainFirst(
Chain,
Map,
F.Flow2(f, WithContext),
)
}

View File

@@ -0,0 +1,315 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerresult
import (
"context"
"testing"
E "github.com/IBM/fp-go/v2/either"
F "github.com/IBM/fp-go/v2/function"
"github.com/stretchr/testify/assert"
)
func TestMapTo(t *testing.T) {
t.Run("executes original reader and returns constant value on success", func(t *testing.T) {
executed := false
originalReader := func(ctx context.Context) E.Either[error, int] {
executed = true
return E.Of[error](42)
}
// Apply MapTo operator
toDone := MapTo[int]("done")
resultReader := toDone(originalReader)
// Execute the resulting reader
result := resultReader(context.Background())
// Verify the constant value is returned
assert.Equal(t, E.Of[error]("done"), result)
// Verify the original reader WAS executed (side effect occurred)
assert.True(t, executed, "original reader should be executed to allow side effects")
})
t.Run("executes reader in functional pipeline", func(t *testing.T) {
executed := false
step1 := func(ctx context.Context) E.Either[error, int] {
executed = true
return E.Of[error](100)
}
pipeline := F.Pipe1(
step1,
MapTo[int]("complete"),
)
result := pipeline(context.Background())
assert.Equal(t, E.Of[error]("complete"), result)
assert.True(t, executed, "original reader should be executed in pipeline")
})
t.Run("executes reader with side effects", func(t *testing.T) {
sideEffectOccurred := false
readerWithSideEffect := func(ctx context.Context) E.Either[error, int] {
sideEffectOccurred = true
return E.Of[error](42)
}
resultReader := MapTo[int](true)(readerWithSideEffect)
result := resultReader(context.Background())
assert.Equal(t, E.Of[error](true), result)
assert.True(t, sideEffectOccurred, "side effect should occur")
})
t.Run("preserves errors from original reader", func(t *testing.T) {
executed := false
testErr := assert.AnError
failingReader := func(ctx context.Context) E.Either[error, int] {
executed = true
return E.Left[int](testErr)
}
resultReader := MapTo[int]("done")(failingReader)
result := resultReader(context.Background())
assert.Equal(t, E.Left[string](testErr), result)
assert.True(t, executed, "failing reader should still be executed")
})
}
func TestMonadMapTo(t *testing.T) {
t.Run("executes original reader and returns constant value on success", func(t *testing.T) {
executed := false
originalReader := func(ctx context.Context) E.Either[error, int] {
executed = true
return E.Of[error](42)
}
// Apply MonadMapTo
resultReader := MonadMapTo(originalReader, "done")
// Execute the resulting reader
result := resultReader(context.Background())
// Verify the constant value is returned
assert.Equal(t, E.Of[error]("done"), result)
// Verify the original reader WAS executed (side effect occurred)
assert.True(t, executed, "original reader should be executed to allow side effects")
})
t.Run("executes complex computation with side effects", func(t *testing.T) {
computationExecuted := false
complexReader := func(ctx context.Context) E.Either[error, string] {
computationExecuted = true
return E.Of[error]("complex result")
}
resultReader := MonadMapTo(complexReader, 42)
result := resultReader(context.Background())
assert.Equal(t, E.Of[error](42), result)
assert.True(t, computationExecuted, "complex computation should be executed")
})
t.Run("preserves errors from original reader", func(t *testing.T) {
executed := false
testErr := assert.AnError
failingReader := func(ctx context.Context) E.Either[error, []string] {
executed = true
return E.Left[[]string](testErr)
}
resultReader := MonadMapTo(failingReader, 99)
result := resultReader(context.Background())
assert.Equal(t, E.Left[int](testErr), result)
assert.True(t, executed, "failing reader should still be executed")
})
}
func TestChainTo(t *testing.T) {
t.Run("executes first reader then second reader on success", func(t *testing.T) {
firstExecuted := false
secondExecuted := false
firstReader := func(ctx context.Context) E.Either[error, int] {
firstExecuted = true
return E.Of[error](42)
}
secondReader := func(ctx context.Context) E.Either[error, string] {
secondExecuted = true
return E.Of[error]("result")
}
// Apply ChainTo operator
thenSecond := ChainTo[int](secondReader)
resultReader := thenSecond(firstReader)
// Execute the resulting reader
result := resultReader(context.Background())
// Verify the second reader's result is returned
assert.Equal(t, E.Of[error]("result"), result)
// Verify both readers were executed
assert.True(t, firstExecuted, "first reader should be executed")
assert.True(t, secondExecuted, "second reader should be executed")
})
t.Run("executes both readers in functional pipeline", func(t *testing.T) {
firstExecuted := false
secondExecuted := false
step1 := func(ctx context.Context) E.Either[error, int] {
firstExecuted = true
return E.Of[error](100)
}
step2 := func(ctx context.Context) E.Either[error, string] {
secondExecuted = true
return E.Of[error]("complete")
}
pipeline := F.Pipe1(
step1,
ChainTo[int](step2),
)
result := pipeline(context.Background())
assert.Equal(t, E.Of[error]("complete"), result)
assert.True(t, firstExecuted, "first reader should be executed in pipeline")
assert.True(t, secondExecuted, "second reader should be executed in pipeline")
})
t.Run("executes first reader with side effects", func(t *testing.T) {
sideEffectOccurred := false
readerWithSideEffect := func(ctx context.Context) E.Either[error, int] {
sideEffectOccurred = true
return E.Of[error](42)
}
secondReader := func(ctx context.Context) E.Either[error, bool] {
return E.Of[error](true)
}
resultReader := ChainTo[int](secondReader)(readerWithSideEffect)
result := resultReader(context.Background())
assert.Equal(t, E.Of[error](true), result)
assert.True(t, sideEffectOccurred, "side effect should occur in first reader")
})
t.Run("preserves error from first reader without executing second", func(t *testing.T) {
firstExecuted := false
secondExecuted := false
testErr := assert.AnError
failingReader := func(ctx context.Context) E.Either[error, int] {
firstExecuted = true
return E.Left[int](testErr)
}
secondReader := func(ctx context.Context) E.Either[error, string] {
secondExecuted = true
return E.Of[error]("result")
}
resultReader := ChainTo[int](secondReader)(failingReader)
result := resultReader(context.Background())
assert.Equal(t, E.Left[string](testErr), result)
assert.True(t, firstExecuted, "first reader should be executed")
assert.False(t, secondExecuted, "second reader should not be executed on error")
})
}
func TestMonadChainTo(t *testing.T) {
t.Run("executes first reader then second reader on success", func(t *testing.T) {
firstExecuted := false
secondExecuted := false
firstReader := func(ctx context.Context) E.Either[error, int] {
firstExecuted = true
return E.Of[error](42)
}
secondReader := func(ctx context.Context) E.Either[error, string] {
secondExecuted = true
return E.Of[error]("result")
}
// Apply MonadChainTo
resultReader := MonadChainTo(firstReader, secondReader)
// Execute the resulting reader
result := resultReader(context.Background())
// Verify the second reader's result is returned
assert.Equal(t, E.Of[error]("result"), result)
// Verify both readers were executed
assert.True(t, firstExecuted, "first reader should be executed")
assert.True(t, secondExecuted, "second reader should be executed")
})
t.Run("executes complex first computation with side effects", func(t *testing.T) {
firstExecuted := false
secondExecuted := false
complexFirstReader := func(ctx context.Context) E.Either[error, []int] {
firstExecuted = true
return E.Of[error]([]int{1, 2, 3})
}
secondReader := func(ctx context.Context) E.Either[error, string] {
secondExecuted = true
return E.Of[error]("done")
}
resultReader := MonadChainTo(complexFirstReader, secondReader)
result := resultReader(context.Background())
assert.Equal(t, E.Of[error]("done"), result)
assert.True(t, firstExecuted, "complex first computation should be executed")
assert.True(t, secondExecuted, "second reader should be executed")
})
t.Run("preserves error from first reader without executing second", func(t *testing.T) {
firstExecuted := false
secondExecuted := false
testErr := assert.AnError
failingReader := func(ctx context.Context) E.Either[error, map[string]int] {
firstExecuted = true
return E.Left[map[string]int](testErr)
}
secondReader := func(ctx context.Context) E.Either[error, float64] {
secondExecuted = true
return E.Of[error](3.14)
}
resultReader := MonadChainTo(failingReader, secondReader)
result := resultReader(context.Background())
assert.Equal(t, E.Left[float64](testErr), result)
assert.True(t, firstExecuted, "first reader should be executed")
assert.False(t, secondExecuted, "second reader should not be executed on error")
})
}

View File

@@ -0,0 +1,106 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package readerresult implements a specialization of the Reader monad assuming a golang context as the context of the monad and a standard golang error
package readerresult
import (
"context"
"github.com/IBM/fp-go/v2/either"
"github.com/IBM/fp-go/v2/result"
)
// TailRec implements tail-recursive computation for ReaderResult with context cancellation support.
//
// TailRec takes a Kleisli function that returns Either[A, B] and converts it into a stack-safe,
// tail-recursive computation. The function repeatedly applies the Kleisli until it produces a Right value.
//
// The implementation includes a short-circuit mechanism that checks for context cancellation on each
// iteration. If the context is canceled (ctx.Err() != nil), the computation immediately returns a
// Left result containing the context's cause error, preventing unnecessary computation.
//
// Type Parameters:
// - A: The input type for the recursive step
// - B: The final result type
//
// Parameters:
// - f: A Kleisli function that takes an A and returns a ReaderResult containing Either[A, B].
// When the result is Left[B](a), recursion continues with the new value 'a'.
// When the result is Right[A](b), recursion terminates with the final value 'b'.
//
// Returns:
// - A Kleisli function that performs the tail-recursive computation in a stack-safe manner.
//
// Behavior:
// - On each iteration, checks if the context has been canceled (short circuit)
// - If canceled, returns result.Left[B](context.Cause(ctx))
// - If the step's outer Result is an error (result.Left), the error is propagated
// - If the step succeeds with either.Left[B](a), recursion continues with the new value 'a'
// - If the step succeeds with either.Right[A](b), recursion terminates with the success value 'b'
//
// Example - Factorial computation with context:
//
// type State struct {
// n int
// acc int
// }
//
// factorialStep := func(state State) ReaderResult[either.Either[State, int]] {
// return func(ctx context.Context) result.Result[either.Either[State, int]] {
// if state.n <= 0 {
// return result.Of(either.Right[State](state.acc))
// }
// return result.Of(either.Left[int](State{state.n - 1, state.acc * state.n}))
// }
// }
//
// factorial := TailRec(factorialStep)
// result := factorial(State{5, 1})(ctx) // Returns result.Of(120)
//
// Example - Context cancellation:
//
// ctx, cancel := context.WithCancel(context.Background())
// cancel() // Cancel immediately
//
// computation := TailRec(someStep)
// result := computation(initialValue)(ctx)
// // Returns result.Left[B](context.Cause(ctx)) without executing any steps
//
//go:inline
func TailRec[A, B any](f Kleisli[A, either.Either[A, B]]) Kleisli[A, B] {
return func(a A) ReaderResult[B] {
initialReader := f(a)
return func(ctx context.Context) Result[B] {
rdr := initialReader
for {
// short circuit
if ctx.Err() != nil {
return result.Left[B](context.Cause(ctx))
}
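// run the current step with the shared context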
current := rdr(ctx)
rec, e := either.Unwrap(current)
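// the step itself failed: propagate the error and stop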
if either.IsLeft(current) {
return result.Left[B](e)
}
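// inspect the inner Either: Right(b) terminates the recursion, Left(a) feeds the next step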
b, a := either.Unwrap(rec)
if either.IsRight(rec) {
return result.Of(b)
}
rdr = f(a)
}
}
}
}

View File

@@ -0,0 +1,498 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerresult
import (
"context"
"errors"
"fmt"
"testing"
"time"
A "github.com/IBM/fp-go/v2/array"
E "github.com/IBM/fp-go/v2/either"
R "github.com/IBM/fp-go/v2/result"
"github.com/stretchr/testify/assert"
)
// TestTailRecFactorial tests factorial computation with context
func TestTailRecFactorial(t *testing.T) {
type State struct {
n int
acc int
}
factorialStep := func(state State) ReaderResult[E.Either[State, int]] {
return func(ctx context.Context) Result[E.Either[State, int]] {
if state.n <= 0 {
return R.Of(E.Right[State](state.acc))
}
return R.Of(E.Left[int](State{state.n - 1, state.acc * state.n}))
}
}
factorial := TailRec(factorialStep)
result := factorial(State{5, 1})(context.Background())
assert.Equal(t, R.Of(120), result)
}
// TestTailRecFibonacci tests Fibonacci computation
func TestTailRecFibonacci(t *testing.T) {
type State struct {
n int
prev int
curr int
}
fibStep := func(state State) ReaderResult[E.Either[State, int]] {
return func(ctx context.Context) Result[E.Either[State, int]] {
if state.n <= 0 {
return R.Of(E.Right[State](state.curr))
}
return R.Of(E.Left[int](State{state.n - 1, state.curr, state.prev + state.curr}))
}
}
fib := TailRec(fibStep)
result := fib(State{10, 0, 1})(context.Background())
assert.Equal(t, R.Of(89), result) // Fibonacci value after 10 iterations from (prev=0, curr=1)
}
// TestTailRecCountdown tests countdown computation
func TestTailRecCountdown(t *testing.T) {
countdownStep := func(n int) ReaderResult[E.Either[int, int]] {
return func(ctx context.Context) Result[E.Either[int, int]] {
if n <= 0 {
return R.Of(E.Right[int](n))
}
return R.Of(E.Left[int](n - 1))
}
}
countdown := TailRec(countdownStep)
result := countdown(10)(context.Background())
assert.Equal(t, R.Of(0), result)
}
// TestTailRecImmediateTermination tests immediate termination (Right on first call)
func TestTailRecImmediateTermination(t *testing.T) {
immediateStep := func(n int) ReaderResult[E.Either[int, int]] {
return func(ctx context.Context) Result[E.Either[int, int]] {
return R.Of(E.Right[int](n * 2))
}
}
immediate := TailRec(immediateStep)
result := immediate(42)(context.Background())
assert.Equal(t, R.Of(84), result)
}
// TestTailRecStackSafety tests that TailRec handles large iterations without stack overflow
func TestTailRecStackSafety(t *testing.T) {
countdownStep := func(n int) ReaderResult[E.Either[int, int]] {
return func(ctx context.Context) Result[E.Either[int, int]] {
if n <= 0 {
return R.Of(E.Right[int](n))
}
return R.Of(E.Left[int](n - 1))
}
}
countdown := TailRec(countdownStep)
result := countdown(10000)(context.Background())
assert.Equal(t, R.Of(0), result)
}
// TestTailRecSumList tests summing a list
func TestTailRecSumList(t *testing.T) {
type State struct {
list []int
sum int
}
sumStep := func(state State) ReaderResult[E.Either[State, int]] {
return func(ctx context.Context) Result[E.Either[State, int]] {
if A.IsEmpty(state.list) {
return R.Of(E.Right[State](state.sum))
}
return R.Of(E.Left[int](State{state.list[1:], state.sum + state.list[0]}))
}
}
sumList := TailRec(sumStep)
result := sumList(State{[]int{1, 2, 3, 4, 5}, 0})(context.Background())
assert.Equal(t, R.Of(15), result)
}
// TestTailRecCollatzConjecture tests the Collatz conjecture
func TestTailRecCollatzConjecture(t *testing.T) {
collatzStep := func(n int) ReaderResult[E.Either[int, int]] {
return func(ctx context.Context) Result[E.Either[int, int]] {
if n <= 1 {
return R.Of(E.Right[int](n))
}
if n%2 == 0 {
return R.Of(E.Left[int](n / 2))
}
return R.Of(E.Left[int](3*n + 1))
}
}
collatz := TailRec(collatzStep)
result := collatz(10)(context.Background())
assert.Equal(t, R.Of(1), result)
}
// TestTailRecGCD tests greatest common divisor
func TestTailRecGCD(t *testing.T) {
type State struct {
a int
b int
}
gcdStep := func(state State) ReaderResult[E.Either[State, int]] {
return func(ctx context.Context) Result[E.Either[State, int]] {
if state.b == 0 {
return R.Of(E.Right[State](state.a))
}
return R.Of(E.Left[int](State{state.b, state.a % state.b}))
}
}
gcd := TailRec(gcdStep)
result := gcd(State{48, 18})(context.Background())
assert.Equal(t, R.Of(6), result)
}
// TestTailRecErrorPropagation tests that errors are properly propagated
func TestTailRecErrorPropagation(t *testing.T) {
expectedErr := errors.New("computation error")
errorStep := func(n int) ReaderResult[E.Either[int, int]] {
return func(ctx context.Context) Result[E.Either[int, int]] {
if n == 5 {
return R.Left[E.Either[int, int]](expectedErr)
}
if n <= 0 {
return R.Of(E.Right[int](n))
}
return R.Of(E.Left[int](n - 1))
}
}
computation := TailRec(errorStep)
result := computation(10)(context.Background())
assert.True(t, R.IsLeft(result))
_, err := R.Unwrap(result)
assert.Equal(t, expectedErr, err)
}
// TestTailRecContextCancellationImmediate tests short circuit when context is already canceled
func TestTailRecContextCancellationImmediate(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
cancel() // Cancel immediately before execution
stepExecuted := false
countdownStep := func(n int) ReaderResult[E.Either[int, int]] {
return func(ctx context.Context) Result[E.Either[int, int]] {
stepExecuted = true
if n <= 0 {
return R.Of(E.Right[int](n))
}
return R.Of(E.Left[int](n - 1))
}
}
countdown := TailRec(countdownStep)
result := countdown(10)(ctx)
// Should short circuit without executing any steps
assert.False(t, stepExecuted, "Step should not be executed when context is already canceled")
assert.True(t, R.IsLeft(result))
_, err := R.Unwrap(result)
assert.Equal(t, context.Canceled, err)
}
// TestTailRecContextCancellationDuringExecution tests short circuit when context is canceled during execution
func TestTailRecContextCancellationDuringExecution(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
executionCount := 0
countdownStep := func(n int) ReaderResult[E.Either[int, int]] {
return func(ctx context.Context) Result[E.Either[int, int]] {
executionCount++
// Cancel after 3 iterations
if executionCount == 3 {
cancel()
}
if n <= 0 {
return R.Of(E.Right[int](n))
}
return R.Of(E.Left[int](n - 1))
}
}
countdown := TailRec(countdownStep)
result := countdown(100)(ctx)
// Should stop after cancellation
assert.True(t, R.IsLeft(result))
assert.LessOrEqual(t, executionCount, 4, "Should stop shortly after cancellation")
_, err := R.Unwrap(result)
assert.Equal(t, context.Canceled, err)
}
// TestTailRecContextWithTimeout tests behavior with timeout context
func TestTailRecContextWithTimeout(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
defer cancel()
executionCount := 0
slowStep := func(n int) ReaderResult[E.Either[int, int]] {
return func(ctx context.Context) Result[E.Either[int, int]] {
executionCount++
// Simulate slow computation
time.Sleep(20 * time.Millisecond)
if n <= 0 {
return R.Of(E.Right[int](n))
}
return R.Of(E.Left[int](n - 1))
}
}
computation := TailRec(slowStep)
result := computation(100)(ctx)
// Should timeout and return error
assert.True(t, R.IsLeft(result))
assert.Less(t, executionCount, 100, "Should not complete all iterations due to timeout")
_, err := R.Unwrap(result)
assert.Equal(t, context.DeadlineExceeded, err)
}
// TestTailRecContextWithCause tests that context.Cause is properly returned
func TestTailRecContextWithCause(t *testing.T) {
customErr := errors.New("custom cancellation reason")
ctx, cancel := context.WithCancelCause(context.Background())
cancel(customErr)
countdownStep := func(n int) ReaderResult[E.Either[int, int]] {
return func(ctx context.Context) Result[E.Either[int, int]] {
if n <= 0 {
return R.Of(E.Right[int](n))
}
return R.Of(E.Left[int](n - 1))
}
}
countdown := TailRec(countdownStep)
result := countdown(10)(ctx)
assert.True(t, R.IsLeft(result))
_, err := R.Unwrap(result)
assert.Equal(t, customErr, err)
}
// TestTailRecContextCancellationMultipleIterations tests that cancellation is checked on each iteration
func TestTailRecContextCancellationMultipleIterations(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
executionCount := 0
maxExecutions := 5
countdownStep := func(n int) ReaderResult[E.Either[int, int]] {
return func(ctx context.Context) Result[E.Either[int, int]] {
executionCount++
if executionCount == maxExecutions {
cancel()
}
if n <= 0 {
return R.Of(E.Right[int](n))
}
return R.Of(E.Left[int](n - 1))
}
}
countdown := TailRec(countdownStep)
result := countdown(1000)(ctx)
// Should detect cancellation on next iteration check
assert.True(t, R.IsLeft(result))
// Should stop within 1-2 iterations after cancellation
assert.LessOrEqual(t, executionCount, maxExecutions+2)
_, err := R.Unwrap(result)
assert.Equal(t, context.Canceled, err)
}
// TestTailRecContextNotCanceled tests normal execution when context is not canceled
func TestTailRecContextNotCanceled(t *testing.T) {
ctx := context.Background()
executionCount := 0
countdownStep := func(n int) ReaderResult[E.Either[int, int]] {
return func(ctx context.Context) Result[E.Either[int, int]] {
executionCount++
if n <= 0 {
return R.Of(E.Right[int](n))
}
return R.Of(E.Left[int](n - 1))
}
}
countdown := TailRec(countdownStep)
result := countdown(10)(ctx)
assert.Equal(t, 11, executionCount) // 10, 9, 8, ..., 1, 0
assert.Equal(t, R.Of(0), result)
}
// TestTailRecPowerOfTwo tests computing power of 2
func TestTailRecPowerOfTwo(t *testing.T) {
type State struct {
exponent int
result int
target int
}
powerStep := func(state State) ReaderResult[E.Either[State, int]] {
return func(ctx context.Context) Result[E.Either[State, int]] {
if state.exponent >= state.target {
return R.Of(E.Right[State](state.result))
}
return R.Of(E.Left[int](State{state.exponent + 1, state.result * 2, state.target}))
}
}
power := TailRec(powerStep)
result := power(State{0, 1, 10})(context.Background())
assert.Equal(t, R.Of(1024), result) // 2^10
}
// TestTailRecFindInRange tests finding a value in a range
func TestTailRecFindInRange(t *testing.T) {
type State struct {
current int
max int
target int
}
findStep := func(state State) ReaderResult[E.Either[State, int]] {
return func(ctx context.Context) Result[E.Either[State, int]] {
if state.current >= state.max {
return R.Of(E.Right[State](-1)) // Not found
}
if state.current == state.target {
return R.Of(E.Right[State](state.current)) // Found
}
return R.Of(E.Left[int](State{state.current + 1, state.max, state.target}))
}
}
find := TailRec(findStep)
result := find(State{0, 100, 42})(context.Background())
assert.Equal(t, R.Of(42), result)
}
// TestTailRecFindNotInRange tests finding a value not in range
func TestTailRecFindNotInRange(t *testing.T) {
type State struct {
current int
max int
target int
}
findStep := func(state State) ReaderResult[E.Either[State, int]] {
return func(ctx context.Context) Result[E.Either[State, int]] {
if state.current >= state.max {
return R.Of(E.Right[State](-1)) // Not found
}
if state.current == state.target {
return R.Of(E.Right[State](state.current)) // Found
}
return R.Of(E.Left[int](State{state.current + 1, state.max, state.target}))
}
}
find := TailRec(findStep)
result := find(State{0, 100, 200})(context.Background())
assert.Equal(t, R.Of(-1), result)
}
// TestTailRecWithContextValue tests that context values are accessible
func TestTailRecWithContextValue(t *testing.T) {
type contextKey string
const multiplierKey contextKey = "multiplier"
ctx := context.WithValue(context.Background(), multiplierKey, 3)
countdownStep := func(n int) ReaderResult[E.Either[int, int]] {
return func(ctx context.Context) Result[E.Either[int, int]] {
if n <= 0 {
multiplier := ctx.Value(multiplierKey).(int)
return R.Of(E.Right[int](n * multiplier))
}
return R.Of(E.Left[int](n - 1))
}
}
countdown := TailRec(countdownStep)
result := countdown(5)(ctx)
assert.Equal(t, R.Of(0), result) // 0 * 3 = 0
}
// TestTailRecComplexState tests with complex state structure
func TestTailRecComplexState(t *testing.T) {
type ComplexState struct {
counter int
sum int
product int
completed bool
}
complexStep := func(state ComplexState) ReaderResult[E.Either[ComplexState, string]] {
return func(ctx context.Context) Result[E.Either[ComplexState, string]] {
if state.counter <= 0 || state.completed {
result := fmt.Sprintf("sum=%d, product=%d", state.sum, state.product)
return R.Of(E.Right[ComplexState](result))
}
newState := ComplexState{
counter: state.counter - 1,
sum: state.sum + state.counter,
product: state.product * state.counter,
completed: state.counter == 1,
}
return R.Of(E.Left[string](newState))
}
}
computation := TailRec(complexStep)
result := computation(ComplexState{5, 0, 1, false})(context.Background())
assert.Equal(t, R.Of("sum=15, product=120"), result)
}

View File

@@ -0,0 +1,84 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerresult
import (
"context"
"time"
RD "github.com/IBM/fp-go/v2/reader"
R "github.com/IBM/fp-go/v2/retry"
RG "github.com/IBM/fp-go/v2/retry/generic"
)
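// Retrying runs an effectful action repeatedly according to a RetryPolicy. After each
// attempt the check predicate inspects the Result; if it asks for another attempt and
// the policy still allows one, the computation waits out the suggested delay and retries.
// The delay is context aware: cancelling the context aborts the wait immediately, and
// the cancellation error is surfaced through WithContextK.
//
// Parameters:
// - policy: the retry policy that decides whether, and how long, to wait before the next attempt
// - action: the effectful step to run; it receives the current R.RetryStatus
// - check: returns true when the latest Result should be retried
//
// Example (a minimal sketch; fetchWithRetry and its flaky fetch are hypothetical):
//
// func fetchWithRetry(policy R.RetryPolicy) ReaderResult[string] {
// attempts := 0
// fetch := func(status R.RetryStatus) ReaderResult[string] {
// return func(ctx context.Context) Result[string] {
// attempts++
// if attempts < 3 {
// return result.Left[string](errors.New("transient failure"))
// }
// return result.Of("payload")
// }
// }
// // retry while the Result is an error
// return Retrying(policy, fetch, func(r Result[string]) bool { return result.IsLeft(r) })
// }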
//go:inline
func Retrying[A any](
policy R.RetryPolicy,
action Kleisli[R.RetryStatus, A],
check func(Result[A]) bool,
) ReaderResult[A] {
// delayWithCancel implements a context-aware delay mechanism for retry operations.
// It creates a timeout context that will be cancelled when either:
// 1. The delay duration expires (normal case), or
// 2. The parent context is cancelled (early termination)
//
// The function waits on timeoutCtx.Done(), which will be signaled in either case:
// - If the delay expires, timeoutCtx is cancelled by the timeout
// - If the parent ctx is cancelled, timeoutCtx inherits the cancellation
//
// After the wait completes, we dispatch to the next action by calling ri(ctx).
// This works correctly because the action is wrapped in WithContextK, which handles
// context cancellation by checking ctx.Err() and returning an appropriate error
// (context.Canceled or context.DeadlineExceeded) when the context is cancelled.
//
// This design ensures that:
// - Retry delays respect context cancellation and terminate immediately
// - The cancellation error propagates correctly through the retry chain
// - No unnecessary delays occur when the context is already cancelled
delayWithCancel := func(delay time.Duration) RD.Operator[context.Context, R.RetryStatus, R.RetryStatus] {
return func(ri Reader[context.Context, R.RetryStatus]) Reader[context.Context, R.RetryStatus] {
return func(ctx context.Context) R.RetryStatus {
// Create a timeout context that will be cancelled when either:
// - The delay duration expires, or
// - The parent context is cancelled
timeoutCtx, cancelTimeout := context.WithTimeout(ctx, delay)
defer cancelTimeout()
// Wait for either the timeout or parent context cancellation
<-timeoutCtx.Done()
// Dispatch to the next action with the original context.
// WithContextK will handle context cancellation correctly.
return ri(ctx)
}
}
}
// instantiate the generic retry loop for the Reader-based types used here
return RG.Retrying(
RD.Chain[context.Context, Result[A], Result[A]],
RD.Chain[context.Context, R.RetryStatus, Result[A]],
RD.Of[context.Context, Result[A]],
RD.Of[context.Context, R.RetryStatus],
delayWithCancel,
policy,
WithContextK(action),
check,
)
}

View File

@@ -13,13 +13,40 @@
// See the License for the specific language governing permissions and
// limitations under the License.
// package readerresult implements a specialization of the Reader monad assuming a golang context as the context of the monad and a standard golang error
// Package readerresult implements a specialization of the Reader monad assuming a golang context as the context of the monad and a standard golang error.
//
// # Pure vs Effectful Functions
//
// This package distinguishes between pure (side-effect free) and effectful (side-effectful) functions:
//
// EFFECTFUL FUNCTIONS (depend on context.Context):
// - ReaderResult[A]: func(context.Context) Result[A] - Effectful computation that needs context
// - These functions are effectful because context.Context is effectful (can be cancelled, has deadlines, carries values)
// - Use for: operations that need cancellation, timeouts, context values, or any context-dependent behavior
// - Examples: database queries, HTTP requests, operations that respect cancellation
//
// PURE FUNCTIONS (side-effect free):
// - func(State) (Value, error) - Pure computation that only depends on state, not context
// - func(State) Value - Pure transformation without errors
// - These functions are pure because they only read from their input state and don't depend on external context
// - Use for: parsing, validation, calculations, data transformations that don't need context
// - Examples: JSON parsing, input validation, mathematical computations
//
// The package provides different bind operations for each:
// - Bind: For effectful ReaderResult computations (State -> ReaderResult[Value])
// - BindResultK: For pure functions with errors (State -> (Value, error))
// - Let: For pure functions without errors (State -> Value)
// - BindReaderK: For context-dependent pure functions (State -> Reader[Context, Value])
// - BindEitherK: For pure Result/Either values (State -> Result[Value])
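//
// Example - contrasting an effectful step with a pure transformation (a minimal
// sketch; fetchCount is a hypothetical helper):
//
// fetchCount := func(key string) ReaderResult[int] {
// return func(ctx context.Context) Result[int] {
// if err := ctx.Err(); err != nil {
// return result.Left[int](err)
// }
// return result.Of(len(key))
// }
// }
//
// pipeline := F.Pipe2(
// Of("hello"),
// Chain(fetchCount), // effectful: may observe cancellation via ctx
// Map(N.Mul(2)), // pure: only depends on its input
// )
// res := pipeline(context.Background()) // Right(10)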
package readerresult
import (
"context"
"github.com/IBM/fp-go/v2/either"
"github.com/IBM/fp-go/v2/endomorphism"
"github.com/IBM/fp-go/v2/optics/lens"
"github.com/IBM/fp-go/v2/optics/prism"
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/reader"
"github.com/IBM/fp-go/v2/readereither"
@@ -30,9 +57,13 @@ type (
Option[A any] = option.Option[A]
Either[A any] = either.Either[error, A]
Result[A any] = result.Result[A]
Reader[R, A any] = reader.Reader[R, A]
// ReaderResult is a specialization of the Reader monad for the typical golang scenario
ReaderResult[A any] = readereither.ReaderEither[context.Context, error, A]
Kleisli[A, B any] = reader.Reader[A, ReaderResult[B]]
Operator[A, B any] = Kleisli[ReaderResult[A], B]
Endomorphism[A any] = endomorphism.Endomorphism[A]
Prism[S, T any] = prism.Prism[S, T]
Lens[S, T any] = lens.Lens[S, T]
)
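// A Kleisli[A, B] is a function from A to ReaderResult[B], while an Operator[A, B]
// transforms a whole ReaderResult[A] into a ReaderResult[B]. A minimal illustration
// (the length helper is hypothetical):
//
// // Kleisli[string, int]
// length := func(s string) ReaderResult[int] {
// return Of(len(s))
// }
//
// // Operator[string, int]
// toLength := Chain(length)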

View File

@@ -68,7 +68,7 @@ func Of[S, A any](a A) StateReaderIOResult[S, A] {
//
// result := statereaderioresult.MonadMap(
// statereaderioresult.Of[AppState](21),
// func(x int) int { return x * 2 },
// N.Mul(2),
// ) // Result contains 42
func MonadMap[S, A, B any](fa StateReaderIOResult[S, A], f func(A) B) StateReaderIOResult[S, B] {
return statet.MonadMap[StateReaderIOResult[S, A], StateReaderIOResult[S, B]](
@@ -83,7 +83,7 @@ func MonadMap[S, A, B any](fa StateReaderIOResult[S, A], f func(A) B) StateReade
//
// Example:
//
// double := statereaderioresult.Map[AppState](func(x int) int { return x * 2 })
// double := statereaderioresult.Map[AppState](N.Mul(2))
// result := function.Pipe1(statereaderioresult.Of[AppState](21), double)
func Map[S, A, B any](f func(A) B) Operator[S, A, B] {
return statet.Map[StateReaderIOResult[S, A], StateReaderIOResult[S, B]](
@@ -135,7 +135,7 @@ func Chain[S, A, B any](f Kleisli[S, A, B]) Operator[S, A, B] {
//
// Example:
//
// fab := statereaderioresult.Of[AppState](func(x int) int { return x * 2 })
// fab := statereaderioresult.Of[AppState](N.Mul(2))
// fa := statereaderioresult.Of[AppState](21)
// result := statereaderioresult.MonadAp(fab, fa) // Result contains 42
func MonadAp[B, S, A any](fab StateReaderIOResult[S, func(A) B], fa StateReaderIOResult[S, A]) StateReaderIOResult[S, B] {

View File

@@ -19,12 +19,12 @@ import (
"context"
"testing"
RIORES "github.com/IBM/fp-go/v2/context/readerioresult"
ST "github.com/IBM/fp-go/v2/context/statereaderioresult"
EQ "github.com/IBM/fp-go/v2/eq"
L "github.com/IBM/fp-go/v2/internal/monad/testing"
P "github.com/IBM/fp-go/v2/pair"
RES "github.com/IBM/fp-go/v2/result"
RIORES "github.com/IBM/fp-go/v2/context/readerioresult"
ST "github.com/IBM/fp-go/v2/context/statereaderioresult"
)
// AssertLaws asserts the monad laws for the StateReaderIOResult monad

File diff suppressed because it is too large

View File

@@ -23,6 +23,9 @@ github.com/IBM/fp-go/v2/readerresult/from.go:33.70,35.2 1 1
github.com/IBM/fp-go/v2/readerresult/from.go:45.80,47.2 1 1
github.com/IBM/fp-go/v2/readerresult/from.go:57.92,59.2 1 1
github.com/IBM/fp-go/v2/readerresult/from.go:69.104,71.2 1 1
github.com/IBM/fp-go/v2/readerresult/monoid.go:37.62,45.2 1 1
github.com/IBM/fp-go/v2/readerresult/monoid.go:64.70,69.2 1 1
github.com/IBM/fp-go/v2/readerresult/monoid.go:91.62,98.2 1 1
github.com/IBM/fp-go/v2/readerresult/reader.go:41.59,43.2 1 1
github.com/IBM/fp-go/v2/readerresult/reader.go:49.59,51.2 1 1
github.com/IBM/fp-go/v2/readerresult/reader.go:61.63,63.2 1 1
@@ -56,6 +59,8 @@ github.com/IBM/fp-go/v2/readerresult/reader.go:453.85,455.2 1 1
github.com/IBM/fp-go/v2/readerresult/reader.go:460.55,462.2 1 0
github.com/IBM/fp-go/v2/readerresult/reader.go:473.94,475.2 1 0
github.com/IBM/fp-go/v2/readerresult/reader.go:486.65,488.2 1 1
github.com/IBM/fp-go/v2/readerresult/reader.go:494.103,502.2 1 1
github.com/IBM/fp-go/v2/readerresult/reader.go:508.71,515.2 1 0
github.com/IBM/fp-go/v2/readerresult/sequence.go:35.78,40.2 1 1
github.com/IBM/fp-go/v2/readerresult/sequence.go:54.35,60.2 1 1
github.com/IBM/fp-go/v2/readerresult/sequence.go:75.38,82.2 1 1

View File

@@ -103,11 +103,11 @@ func (t *token[T]) Unerase(val any) Result[T] {
func (t *token[T]) ProviderFactory() Option[DIE.ProviderFactory] {
return t.base.providerFactory
}
func makeTokenBase(name string, id string, typ int, providerFactory Option[DIE.ProviderFactory]) *tokenBase {
func makeTokenBase(name, id string, typ int, providerFactory Option[DIE.ProviderFactory]) *tokenBase {
return &tokenBase{name, id, typ, providerFactory}
}
func makeToken[T any](name string, id string, typ int, unerase func(val any) Result[T], providerFactory Option[DIE.ProviderFactory]) Dependency[T] {
func makeToken[T any](name, id string, typ int, unerase func(val any) Result[T], providerFactory Option[DIE.ProviderFactory]) Dependency[T] {
return &token[T]{makeTokenBase(name, id, typ, providerFactory), unerase}
}

View File

@@ -75,7 +75,7 @@ func TraverseArray[E, A, B any](f Kleisli[E, A, B]) Kleisli[E, []A, []B] {
// Example:
//
// validate := func(i int, s string) either.Either[error, string] {
// if len(s) > 0 {
// if S.IsNonEmpty(s) {
// return either.Right[error](fmt.Sprintf("%d:%s", i, s))
// }
// return either.Left[string](fmt.Errorf("empty at index %d", i))
@@ -105,7 +105,7 @@ func TraverseArrayWithIndexG[GA ~[]A, GB ~[]B, E, A, B any](f func(int, A) Eithe
// Example:
//
// validate := func(i int, s string) either.Either[error, string] {
// if len(s) > 0 {
// if S.IsNonEmpty(s) {
// return either.Right[error](fmt.Sprintf("%d:%s", i, s))
// }
// return either.Left[string](fmt.Errorf("empty at index %d", i))

View File

@@ -34,7 +34,7 @@ func Curry0[R any](f func() (R, error)) func() Either[error, R] {
//
// Example:
//
// parse := func(s string) (int, error) { return strconv.Atoi(s) }
// parse := strconv.Atoi
// curried := either.Curry1(parse)
// result := curried("42") // Right(42)
func Curry1[T1, R any](f func(T1) (R, error)) func(T1) Either[error, R] {

View File

@@ -19,6 +19,21 @@
// - Left represents an error or failure case (type E)
// - Right represents a success case (type A)
//
// # Fantasy Land Specification
//
// This implementation corresponds to the Fantasy Land Either type:
// https://github.com/fantasyland/fantasy-land#either
//
// Implemented Fantasy Land algebras:
// - Functor: https://github.com/fantasyland/fantasy-land#functor
// - Bifunctor: https://github.com/fantasyland/fantasy-land#bifunctor
// - Apply: https://github.com/fantasyland/fantasy-land#apply
// - Applicative: https://github.com/fantasyland/fantasy-land#applicative
// - Chain: https://github.com/fantasyland/fantasy-land#chain
// - Monad: https://github.com/fantasyland/fantasy-land#monad
// - Alt: https://github.com/fantasyland/fantasy-land#alt
// - Foldable: https://github.com/fantasyland/fantasy-land#foldable
//
// # Core Concepts
//
// The Either type is a discriminated union that can hold either a Left value (typically an error)

View File

@@ -22,8 +22,9 @@ import (
"testing"
F "github.com/IBM/fp-go/v2/function"
M "github.com/IBM/fp-go/v2/monoid"
N "github.com/IBM/fp-go/v2/number"
O "github.com/IBM/fp-go/v2/option"
S "github.com/IBM/fp-go/v2/string"
"github.com/stretchr/testify/assert"
)
@@ -305,7 +306,7 @@ func TestTraverseArray(t *testing.T) {
// Test TraverseArrayWithIndex
func TestTraverseArrayWithIndex(t *testing.T) {
validate := func(i int, s string) Either[error, string] {
if len(s) > 0 {
if S.IsNonEmpty(s) {
return Right[error](fmt.Sprintf("%d:%s", i, s))
}
return Left[string](fmt.Errorf("empty at index %d", i))
@@ -334,7 +335,7 @@ func TestTraverseRecord(t *testing.T) {
// Test TraverseRecordWithIndex
func TestTraverseRecordWithIndex(t *testing.T) {
validate := func(k string, v string) Either[error, string] {
if len(v) > 0 {
if S.IsNonEmpty(v) {
return Right[error](k + ":" + v)
}
return Left[string](fmt.Errorf("empty value for key %s", k))
@@ -373,7 +374,7 @@ func TestCurry0(t *testing.T) {
}
func TestCurry1(t *testing.T) {
parse := func(s string) (int, error) { return strconv.Atoi(s) }
parse := strconv.Atoi
curried := Curry1(parse)
result := curried("42")
assert.Equal(t, Right[error](42), result)
@@ -645,7 +646,7 @@ func TestAltSemigroup(t *testing.T) {
// Test AlternativeMonoid
func TestAlternativeMonoid(t *testing.T) {
intAdd := M.MakeMonoid(func(a, b int) int { return a + b }, 0)
intAdd := N.MonoidSum[int]()
m := AlternativeMonoid[error](intAdd)
result := m.Concat(Right[error](1), Right[error](2))

View File

@@ -22,7 +22,6 @@ import (
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/utils"
IO "github.com/IBM/fp-go/v2/io"
O "github.com/IBM/fp-go/v2/option"
S "github.com/IBM/fp-go/v2/string"
"github.com/stretchr/testify/assert"
@@ -120,10 +119,3 @@ func TestStringer(t *testing.T) {
var s fmt.Stringer = &e
assert.Equal(t, exp, s.String())
}
func TestFromIO(t *testing.T) {
f := IO.Of("abc")
e := FromIO[error](f)
assert.Equal(t, Right[error]("abc"), e)
}

View File

@@ -17,11 +17,19 @@ package either
import (
"log"
"log/slog"
F "github.com/IBM/fp-go/v2/function"
L "github.com/IBM/fp-go/v2/logging"
)
var (
// slogError creates a slog.Attr with key "error" for logging error values
slogError = F.Bind1st(slog.Any, "error")
// slogValue creates a slog.Attr with key "value" for logging success values
slogValue = F.Bind1st(slog.Any, "value")
)
func _log[E, A any](left func(string, ...any), right func(string, ...any), prefix string) Operator[E, A, A] {
return Fold(
func(e E) Either[E, A] {
@@ -62,3 +70,91 @@ func Logger[E, A any](loggers ...*log.Logger) func(string) Operator[E, A, A] {
}
}
}
// ToSLogAttr converts an Either value to a structured logging attribute (slog.Attr).
//
// This function creates a converter that transforms Either values into slog.Attr for use
// with Go's structured logging (log/slog). It maps:
// - Left values to an "error" attribute
// - Right values to a "value" attribute
//
// This is particularly useful when integrating Either-based error handling with structured
// logging systems, allowing you to log both successful values and errors in a consistent,
// structured format.
//
// Type Parameters:
// - E: The Left (error) type of the Either
// - A: The Right (success) type of the Either
//
// Returns:
// - A function that converts Either[E, A] to slog.Attr
//
// Example with Left (error):
//
// converter := either.ToSLogAttr[error, int]()
// leftValue := either.Left[int](errors.New("connection failed"))
// attr := converter(leftValue)
// // attr is: slog.Any("error", errors.New("connection failed"))
//
// logger.LogAttrs(ctx, slog.LevelError, "Operation failed", attr)
// // Logs: {"level":"error","msg":"Operation failed","error":"connection failed"}
//
// Example with Right (success):
//
// converter := either.ToSLogAttr[error, User]()
// rightValue := either.Right[error](User{ID: 123, Name: "Alice"})
// attr := converter(rightValue)
// // attr is: slog.Any("value", User{ID: 123, Name: "Alice"})
//
// logger.LogAttrs(ctx, slog.LevelInfo, "User fetched", attr)
// // Logs: {"level":"info","msg":"User fetched","value":{"ID":123,"Name":"Alice"}}
//
// Example in a pipeline with structured logging:
//
// toAttr := either.ToSLogAttr[error, Data]()
//
// result := F.Pipe2(
// fetchData(id),
// either.Map(processData),
// either.Map(validateData),
// )
//
// attr := toAttr(result)
// logger.LogAttrs(ctx, slog.LevelInfo, "Data processing complete", attr)
// // Logs success: {"level":"info","msg":"Data processing complete","value":{...}}
// // Or error: {"level":"info","msg":"Data processing complete","error":"validation failed"}
//
// Example with custom log levels based on Either:
//
// toAttr := either.ToSLogAttr[error, Response]()
// result := callAPI(endpoint)
//
// level := either.Fold(
// func(error) slog.Level { return slog.LevelError },
// func(Response) slog.Level { return slog.LevelInfo },
// )(result)
//
// logger.LogAttrs(ctx, level, "API call completed", toAttr(result))
//
// Use Cases:
// - Structured logging: Convert Either results to structured log attributes
// - Error tracking: Log errors with consistent "error" key in structured logs
// - Success monitoring: Log successful values with consistent "value" key
// - Observability: Integrate Either-based error handling with logging systems
// - Debugging: Inspect Either values in logs with proper structure
// - Metrics: Extract Either values for metrics collection in logging pipelines
//
// Note: The returned slog.Attr uses "error" for Left values and "value" for Right values.
// These keys are consistent with common structured logging conventions.
func ToSLogAttr[E, A any]() func(Either[E, A]) slog.Attr {
return Fold(
F.Flow2(
F.ToAny[E],
slogError,
),
F.Flow2(
F.ToAny[A],
slogValue,
),
)
}
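For reference, a minimal self-contained sketch of ToSLogAttr wired into a slog text handler (editorial example, not part of the diff above; fetchName is a hypothetical helper and the import path of the either package is assumed):

package main

import (
	"errors"
	"log/slog"
	"os"

	E "github.com/IBM/fp-go/v2/either"
)

// fetchName is a hypothetical lookup used only for illustration.
func fetchName(id int) E.Either[error, string] {
	if id <= 0 {
		return E.Left[string](errors.New("invalid id"))
	}
	return E.Right[error]("alice")
}

func main() {
	logger := slog.New(slog.NewTextHandler(os.Stdout, nil))
	toAttr := E.ToSLogAttr[error, string]()

	// A Right value is logged under the "value" key ...
	logger.Info("lookup finished", toAttr(fetchName(42)))
	// ... and a Left value under the "error" key.
	logger.Info("lookup finished", toAttr(fetchName(-1)))
}

Because slog.Logger.Info accepts slog.Attr values directly in its variadic arguments, the converted attribute can be passed without further wrapping.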

View File

@@ -16,9 +16,12 @@
package either
import (
"errors"
"log/slog"
"testing"
F "github.com/IBM/fp-go/v2/function"
N "github.com/IBM/fp-go/v2/number"
"github.com/stretchr/testify/assert"
)
@@ -35,3 +38,139 @@ func TestLogger(t *testing.T) {
assert.Equal(t, r, res)
}
func TestToSLogAttr_Left(t *testing.T) {
// Test with Left (error) value
converter := ToSLogAttr[error, int]()
testErr := errors.New("test error")
leftValue := Left[int](testErr)
attr := converter(leftValue)
// Verify the attribute has the correct key
assert.Equal(t, "error", attr.Key)
// Verify the attribute value is the error
assert.Equal(t, testErr, attr.Value.Any())
}
func TestToSLogAttr_Right(t *testing.T) {
// Test with Right (success) value
converter := ToSLogAttr[error, string]()
rightValue := Right[error]("success value")
attr := converter(rightValue)
// Verify the attribute has the correct key
assert.Equal(t, "value", attr.Key)
// Verify the attribute value is the success value
assert.Equal(t, "success value", attr.Value.Any())
}
func TestToSLogAttr_LeftWithCustomType(t *testing.T) {
// Test with custom error type
type CustomError struct {
Code int
Message string
}
converter := ToSLogAttr[CustomError, string]()
customErr := CustomError{Code: 404, Message: "not found"}
leftValue := Left[string](customErr)
attr := converter(leftValue)
assert.Equal(t, "error", attr.Key)
assert.Equal(t, customErr, attr.Value.Any())
}
func TestToSLogAttr_RightWithCustomType(t *testing.T) {
// Test with custom success type
type User struct {
ID int
Name string
}
converter := ToSLogAttr[error, User]()
user := User{ID: 123, Name: "Alice"}
rightValue := Right[error](user)
attr := converter(rightValue)
assert.Equal(t, "value", attr.Key)
assert.Equal(t, user, attr.Value.Any())
}
func TestToSLogAttr_InPipeline(t *testing.T) {
// Test ToSLogAttr in a functional pipeline
converter := ToSLogAttr[error, int]()
// Test with successful pipeline
successResult := F.Pipe2(
Right[error](10),
Map[error](N.Mul(2)),
converter,
)
assert.Equal(t, "value", successResult.Key)
// slog.Any converts int to int64
assert.Equal(t, int64(20), successResult.Value.Any())
// Test with failed pipeline
testErr := errors.New("computation failed")
failureResult := F.Pipe2(
Left[int](testErr),
Map[error](N.Mul(2)),
converter,
)
assert.Equal(t, "error", failureResult.Key)
assert.Equal(t, testErr, failureResult.Value.Any())
}
func TestToSLogAttr_WithNilError(t *testing.T) {
// Test with nil error (edge case)
converter := ToSLogAttr[error, string]()
var nilErr error = nil
leftValue := Left[string](nilErr)
attr := converter(leftValue)
assert.Equal(t, "error", attr.Key)
assert.Nil(t, attr.Value.Any())
}
func TestToSLogAttr_WithZeroValue(t *testing.T) {
// Test with zero value of success type
converter := ToSLogAttr[error, int]()
rightValue := Right[error](0)
attr := converter(rightValue)
assert.Equal(t, "value", attr.Key)
// slog.Any converts int to int64
assert.Equal(t, int64(0), attr.Value.Any())
}
func TestToSLogAttr_WithEmptyString(t *testing.T) {
// Test with empty string as success value
converter := ToSLogAttr[error, string]()
rightValue := Right[error]("")
attr := converter(rightValue)
assert.Equal(t, "value", attr.Key)
assert.Equal(t, "", attr.Value.Any())
}
func TestToSLogAttr_AttributeKind(t *testing.T) {
// Verify that the returned attribute has the correct Kind
converter := ToSLogAttr[error, string]()
leftAttr := converter(Left[string](errors.New("error")))
// Errors are stored as KindAny (which has value 0)
assert.Equal(t, slog.KindAny, leftAttr.Value.Kind())
rightAttr := converter(Right[error]("value"))
// Strings have KindString
assert.Equal(t, slog.KindString, rightAttr.Value.Kind())
}

View File

@@ -95,11 +95,11 @@ func (o *eitherMonad[E, A, B]) Chain(f Kleisli[E, A, B]) Operator[E, A, B] {
// m := either.Monad[error, int, int]()
//
// // Map transforms the value
// value := m.Map(func(x int) int { return x * 2 })(either.Right[error](21))
// value := m.Map(N.Mul(2))(either.Right[error](21))
// // value is Right(42)
//
// // Ap applies wrapped functions (also fails fast)
// fn := either.Right[error](func(x int) int { return x + 1 })
// fn := either.Right[error](N.Add(1))
// result := m.Ap(value)(fn)
// // result is Right(43)
//

34
v2/either/rec.go Normal file
View File

@@ -0,0 +1,34 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package either
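// TailRec performs stack-safe recursion in the Either monad. The step function f
// returns Either[E, Either[A, B]]: an outer Left aborts the loop with the error E,
// an inner Left carries the next state A back into the loop, and an inner Right
// terminates the loop with the final result B. The loop runs iteratively, so deeply
// recursive computations do not grow the call stack.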
//go:inline
func TailRec[E, A, B any](f Kleisli[E, A, Either[A, B]]) Kleisli[E, A, B] {
return func(a A) Either[E, B] {
current := f(a)
for {
rec, e := Unwrap(current)
if IsLeft(current) {
return Left[B](e)
}
b, a := Unwrap(rec)
if IsRight(rec) {
return Right[E](b)
}
current = f(a)
}
}
}
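A brief usage sketch for TailRec (editorial, not part of rec.go; the sumState type and step function are illustrative names, and the import path is assumed), showing an iterative sum that would otherwise be a deep recursion:

package main

import (
	"fmt"

	"github.com/IBM/fp-go/v2/either"
)

// sumState is an illustrative accumulator type used only in this sketch.
type sumState struct{ n, acc int }

func main() {
	// An inner Left continues the loop with a new state; an inner Right stops it.
	step := func(s sumState) either.Either[error, either.Either[sumState, int]] {
		if s.n == 0 {
			return either.Right[error](either.Right[sumState](s.acc))
		}
		return either.Right[error](either.Left[int](sumState{n: s.n - 1, acc: s.acc + s.n}))
	}

	sum := either.TailRec(step)
	value, _ := either.Unwrap(sum(sumState{n: 100000}))
	fmt.Println(value) // 5000050000, computed without growing the call stack
}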

406
v2/endomorphism/builder.go Normal file
View File

@@ -0,0 +1,406 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package endomorphism
import (
"github.com/IBM/fp-go/v2/function"
A "github.com/IBM/fp-go/v2/internal/array"
)
// Build applies an endomorphism to the zero value of type A, effectively using
// the endomorphism as a builder pattern.
//
// # Endomorphism as Builder Pattern
//
// An endomorphism (a function from type A to type A) can be viewed as a builder pattern
// because it transforms a value of a type into another value of the same type. When you
// compose multiple endomorphisms together, you create a pipeline of transformations that
// build up a final value step by step.
//
// The Build function starts with the zero value of type A and applies the endomorphism
// to it, making it particularly useful for building complex values from scratch using
// a functional composition of transformations.
//
// # Builder Pattern Characteristics
//
// Traditional builder patterns have these characteristics:
// 1. Start with an initial (often empty) state
// 2. Apply a series of transformations/configurations
// 3. Return the final built object
//
// Endomorphisms provide the same pattern functionally:
// 1. Start with zero value: var a A
// 2. Apply composed endomorphisms: e(a)
// 3. Return the transformed value
//
// # Type Parameters
//
// - A: The type being built/transformed
//
// # Parameters
//
// - e: An endomorphism (or composition of endomorphisms) that transforms type A
//
// # Returns
//
// The result of applying the endomorphism to the zero value of type A
//
// # Example - Building a Configuration Object
//
// type Config struct {
// Host string
// Port int
// Timeout time.Duration
// Debug bool
// }
//
// // Define builder functions as endomorphisms
// withHost := func(host string) Endomorphism[Config] {
// return func(c Config) Config {
// c.Host = host
// return c
// }
// }
//
// withPort := func(port int) Endomorphism[Config] {
// return func(c Config) Config {
// c.Port = port
// return c
// }
// }
//
// withTimeout := func(d time.Duration) Endomorphism[Config] {
// return func(c Config) Config {
// c.Timeout = d
// return c
// }
// }
//
// withDebug := func(debug bool) Endomorphism[Config] {
// return func(c Config) Config {
// c.Debug = debug
// return c
// }
// }
//
// // Compose builders using monoid operations
// import M "github.com/IBM/fp-go/v2/monoid"
//
// configBuilder := M.ConcatAll(Monoid[Config]())(
// withHost("localhost"),
// withPort(8080),
// withTimeout(30 * time.Second),
// withDebug(true),
// )
//
// // Build the final configuration
// config := Build(configBuilder)
// // Result: Config{Host: "localhost", Port: 8080, Timeout: 30s, Debug: true}
//
// # Example - Building a String with Transformations
//
// import (
// "strings"
// M "github.com/IBM/fp-go/v2/monoid"
// )
//
// // Define string transformation endomorphisms
// appendHello := func(s string) string { return s + "Hello" }
// appendSpace := func(s string) string { return s + " " }
// appendWorld := func(s string) string { return s + "World" }
// toUpper := strings.ToUpper
//
// // Compose transformations
// stringBuilder := M.ConcatAll(Monoid[string]())(
// appendHello,
// appendSpace,
// appendWorld,
// toUpper,
// )
//
// // Build the final string from empty string
// result := Build(stringBuilder)
// // Result: "HELLO WORLD"
//
// # Example - Building a Slice with Operations
//
// type IntSlice []int
//
// appendValue := func(v int) Endomorphism[IntSlice] {
// return func(s IntSlice) IntSlice {
// return append(s, v)
// }
// }
//
// sortSlice := func(s IntSlice) IntSlice {
// sorted := make(IntSlice, len(s))
// copy(sorted, s)
// sort.Ints(sorted)
// return sorted
// }
//
// // Build a sorted slice
// sliceBuilder := M.ConcatAll(Monoid[IntSlice]())(
// appendValue(5),
// appendValue(2),
// appendValue(8),
// appendValue(1),
// sortSlice,
// )
//
// result := Build(sliceBuilder)
// // Result: IntSlice{1, 2, 5, 8}
//
// # Advantages of Endomorphism Builder Pattern
//
// 1. **Composability**: Builders can be composed using monoid operations
// 2. **Immutability**: Each transformation returns a new value (if implemented immutably)
// 3. **Type Safety**: The type system ensures all transformations work on the same type
// 4. **Reusability**: Individual builder functions can be reused and combined differently
// 5. **Testability**: Each transformation can be tested independently
// 6. **Declarative**: The composition clearly expresses the building process
//
// # Comparison with Traditional Builder Pattern
//
// Traditional OOP Builder:
//
// config := NewConfigBuilder().
// WithHost("localhost").
// WithPort(8080).
// WithTimeout(30 * time.Second).
// Build()
//
// Endomorphism Builder:
//
// config := Build(M.ConcatAll(Monoid[Config]())(
// withHost("localhost"),
// withPort(8080),
// withTimeout(30 * time.Second),
// ))
//
// Both achieve the same goal, but the endomorphism approach:
// - Uses pure functions instead of methods
// - Leverages algebraic properties (monoid) for composition
// - Allows for more flexible composition patterns
// - Integrates naturally with other functional programming constructs
func Build[A any](e Endomorphism[A]) A {
var a A
return e(a)
}
// ConcatAll combines multiple endomorphisms into a single endomorphism using composition.
//
// This function takes a slice of endomorphisms and combines them using the monoid's
// concat operation (which is composition). The resulting endomorphism, when applied,
// will execute all the input endomorphisms in RIGHT-TO-LEFT order (mathematical composition order).
//
// IMPORTANT: Execution order is RIGHT-TO-LEFT:
// - ConcatAll([]Endomorphism{f, g, h}) creates an endomorphism that applies h, then g, then f
// - This is equivalent to f ∘ g ∘ h in mathematical notation
// - The last endomorphism in the slice is applied first
//
// If the slice is empty, returns the identity endomorphism.
//
// # Type Parameters
//
// - T: The type that the endomorphisms operate on
//
// # Parameters
//
// - es: A slice of endomorphisms to combine
//
// # Returns
//
// A single endomorphism that represents the composition of all input endomorphisms
//
// # Example - Basic Composition
//
// double := N.Mul(2)
// increment := N.Add(1)
// square := func(x int) int { return x * x }
//
// // Combine endomorphisms (RIGHT-TO-LEFT execution)
// combined := ConcatAll([]Endomorphism[int]{double, increment, square})
// result := combined(5)
// // Execution: square(5) = 25, increment(25) = 26, double(26) = 52
// // Result: 52
//
// # Example - Building with ConcatAll
//
// type Config struct {
// Host string
// Port int
// }
//
// withHost := func(host string) Endomorphism[Config] {
// return func(c Config) Config {
// c.Host = host
// return c
// }
// }
//
// withPort := func(port int) Endomorphism[Config] {
// return func(c Config) Config {
// c.Port = port
// return c
// }
// }
//
// // Combine configuration builders
// configBuilder := ConcatAll([]Endomorphism[Config]{
// withHost("localhost"),
// withPort(8080),
// })
//
// // Apply to zero value
// config := Build(configBuilder)
// // Result: Config{Host: "localhost", Port: 8080}
//
// # Example - Empty Slice
//
// // Empty slice returns identity
// identity := ConcatAll([]Endomorphism[int]{})
// result := identity(42) // Returns: 42
//
// # Relationship to Monoid
//
// ConcatAll is equivalent to using M.ConcatAll with the endomorphism Monoid:
//
// import M "github.com/IBM/fp-go/v2/monoid"
//
// // These are equivalent:
// result1 := ConcatAll(endomorphisms)
// result2 := M.ConcatAll(Monoid[T]())(endomorphisms)
//
// # Use Cases
//
// 1. **Pipeline Construction**: Build transformation pipelines from individual steps
// 2. **Configuration Building**: Combine multiple configuration setters
// 3. **Data Transformation**: Chain multiple data transformations
// 4. **Middleware Composition**: Combine middleware functions
// 5. **Validation Chains**: Compose multiple validation functions
func ConcatAll[T any](es []Endomorphism[T]) Endomorphism[T] {
return A.Reduce(es, MonadCompose[T], function.Identity[T])
}
// Reduce applies a slice of endomorphisms to the zero value of type T in LEFT-TO-RIGHT order.
//
// This function is a convenience wrapper that:
// 1. Starts with the zero value of type T
// 2. Applies each endomorphism in the slice from left to right
// 3. Returns the final transformed value
//
// IMPORTANT: Execution order is LEFT-TO-RIGHT:
// - Reduce([]Endomorphism{f, g, h}) applies f first, then g, then h
// - This is the opposite of ConcatAll's RIGHT-TO-LEFT order
// - Each endomorphism receives the result of the previous one
//
// This is equivalent to Build(ConcatAll(reverse(es))), but is more efficient and clearer
// for left-to-right sequential application.
//
// # Type Parameters
//
// - T: The type being transformed
//
// # Parameters
//
// - es: A slice of endomorphisms to apply sequentially
//
// # Returns
//
// The final value after applying all endomorphisms to the zero value
//
// # Example - Sequential Transformations
//
// double := N.Mul(2)
// increment := N.Add(1)
// square := func(x int) int { return x * x }
//
// // Apply transformations LEFT-TO-RIGHT
// result := Reduce([]Endomorphism[int]{double, increment, square})
// // Execution: 0 -> double(0) = 0 -> increment(0) = 1 -> square(1) = 1
// // Result: 1
//
// // To simulate a non-zero starting point, prepend an endomorphism that produces it:
// addTen := N.Add(10)
// result2 := Reduce([]Endomorphism[int]{addTen, double, increment})
// // Execution: 0 -> addTen(0) = 10 -> double(10) = 20 -> increment(20) = 21
// // Result: 21
//
// # Example - Building a String
//
// appendHello := func(s string) string { return s + "Hello" }
// appendSpace := func(s string) string { return s + " " }
// appendWorld := func(s string) string { return s + "World" }
//
// // Build string LEFT-TO-RIGHT
// result := Reduce([]Endomorphism[string]{
// appendHello,
// appendSpace,
// appendWorld,
// })
// // Execution: "" -> "Hello" -> "Hello " -> "Hello World"
// // Result: "Hello World"
//
// # Example - Configuration Building
//
// type Settings struct {
// Theme string
// FontSize int
// }
//
// withTheme := func(theme string) Endomorphism[Settings] {
// return func(s Settings) Settings {
// s.Theme = theme
// return s
// }
// }
//
// withFontSize := func(size int) Endomorphism[Settings] {
// return func(s Settings) Settings {
// s.FontSize = size
// return s
// }
// }
//
// // Build settings LEFT-TO-RIGHT
// settings := Reduce([]Endomorphism[Settings]{
// withTheme("dark"),
// withFontSize(14),
// })
// // Result: Settings{Theme: "dark", FontSize: 14}
//
// # Comparison with ConcatAll
//
// // ConcatAll: RIGHT-TO-LEFT composition, returns endomorphism
// endo := ConcatAll([]Endomorphism[int]{f, g, h})
// result1 := endo(value) // Applies h, then g, then f
//
// // Reduce: LEFT-TO-RIGHT application, returns final value
// result2 := Reduce([]Endomorphism[int]{f, g, h})
// // Applies f to zero, then g, then h
//
// # Use Cases
//
// 1. **Sequential Processing**: Apply transformations in order
// 2. **Pipeline Execution**: Execute a pipeline from start to finish
// 3. **Builder Pattern**: Build objects step by step
// 4. **State Machines**: Apply state transitions in sequence
// 5. **Data Flow**: Transform data through multiple stages
func Reduce[T any](es []Endomorphism[T]) T {
var t T
return A.Reduce(es, func(t T, e Endomorphism[T]) T { return e(t) }, t)
}

View File

@@ -0,0 +1,254 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package endomorphism_test
import (
"fmt"
"time"
A "github.com/IBM/fp-go/v2/array"
"github.com/IBM/fp-go/v2/endomorphism"
M "github.com/IBM/fp-go/v2/monoid"
N "github.com/IBM/fp-go/v2/number"
)
// Example_build_basicUsage demonstrates basic usage of the Build function
// to construct a value from the zero value using endomorphisms.
func Example_build_basicUsage() {
// Define simple endomorphisms
addTen := N.Add(10)
double := N.Mul(2)
// Compose them using monoid (RIGHT-TO-LEFT execution)
// double is applied first, then addTen
builder := M.ConcatAll(endomorphism.Monoid[int]())(A.From(
addTen,
double,
))
// Build from zero value: 0 * 2 = 0, 0 + 10 = 10
result := endomorphism.Build(builder)
fmt.Println(result)
// Output: 10
}
// Example_build_configBuilder demonstrates using Build as a configuration builder pattern.
func Example_build_configBuilder() {
type Config struct {
Host string
Port int
Timeout time.Duration
Debug bool
}
// Define builder functions as endomorphisms
withHost := func(host string) endomorphism.Endomorphism[Config] {
return func(c Config) Config {
c.Host = host
return c
}
}
withPort := func(port int) endomorphism.Endomorphism[Config] {
return func(c Config) Config {
c.Port = port
return c
}
}
withTimeout := func(d time.Duration) endomorphism.Endomorphism[Config] {
return func(c Config) Config {
c.Timeout = d
return c
}
}
withDebug := func(debug bool) endomorphism.Endomorphism[Config] {
return func(c Config) Config {
c.Debug = debug
return c
}
}
// Compose builders using monoid
configBuilder := M.ConcatAll(endomorphism.Monoid[Config]())([]endomorphism.Endomorphism[Config]{
withHost("localhost"),
withPort(8080),
withTimeout(30 * time.Second),
withDebug(true),
})
// Build the configuration from zero value
config := endomorphism.Build(configBuilder)
fmt.Printf("Host: %s\n", config.Host)
fmt.Printf("Port: %d\n", config.Port)
fmt.Printf("Timeout: %v\n", config.Timeout)
fmt.Printf("Debug: %v\n", config.Debug)
// Output:
// Host: localhost
// Port: 8080
// Timeout: 30s
// Debug: true
}
// Example_build_stringBuilder demonstrates building a string using endomorphisms.
func Example_build_stringBuilder() {
// Define string transformation endomorphisms
appendHello := func(s string) string { return s + "Hello" }
appendSpace := func(s string) string { return s + " " }
appendWorld := func(s string) string { return s + "World" }
appendExclamation := func(s string) string { return s + "!" }
// Compose transformations (RIGHT-TO-LEFT execution)
stringBuilder := M.ConcatAll(endomorphism.Monoid[string]())([]endomorphism.Endomorphism[string]{
appendHello,
appendSpace,
appendWorld,
appendExclamation,
})
// Build the string from empty string
result := endomorphism.Build(stringBuilder)
fmt.Println(result)
// Output: !World Hello
}
// Example_build_personBuilder demonstrates building a complex struct using the builder pattern.
func Example_build_personBuilder() {
type Person struct {
FirstName string
LastName string
Age int
Email string
}
// Define builder functions
withFirstName := func(name string) endomorphism.Endomorphism[Person] {
return func(p Person) Person {
p.FirstName = name
return p
}
}
withLastName := func(name string) endomorphism.Endomorphism[Person] {
return func(p Person) Person {
p.LastName = name
return p
}
}
withAge := func(age int) endomorphism.Endomorphism[Person] {
return func(p Person) Person {
p.Age = age
return p
}
}
withEmail := func(email string) endomorphism.Endomorphism[Person] {
return func(p Person) Person {
p.Email = email
return p
}
}
// Build a person
personBuilder := M.ConcatAll(endomorphism.Monoid[Person]())([]endomorphism.Endomorphism[Person]{
withFirstName("Alice"),
withLastName("Smith"),
withAge(30),
withEmail("alice.smith@example.com"),
})
person := endomorphism.Build(personBuilder)
fmt.Printf("%s %s, Age: %d, Email: %s\n",
person.FirstName, person.LastName, person.Age, person.Email)
// Output: Alice Smith, Age: 30, Email: alice.smith@example.com
}
// Example_build_conditionalBuilder demonstrates conditional building using endomorphisms.
func Example_build_conditionalBuilder() {
type Settings struct {
Theme string
FontSize int
AutoSave bool
Animations bool
}
withTheme := func(theme string) endomorphism.Endomorphism[Settings] {
return func(s Settings) Settings {
s.Theme = theme
return s
}
}
withFontSize := func(size int) endomorphism.Endomorphism[Settings] {
return func(s Settings) Settings {
s.FontSize = size
return s
}
}
withAutoSave := func(enabled bool) endomorphism.Endomorphism[Settings] {
return func(s Settings) Settings {
s.AutoSave = enabled
return s
}
}
withAnimations := func(enabled bool) endomorphism.Endomorphism[Settings] {
return func(s Settings) Settings {
s.Animations = enabled
return s
}
}
// Build settings conditionally
isDarkMode := true
isAccessibilityMode := true
// Note: Monoid executes RIGHT-TO-LEFT, so later items in the slice are applied first
// We need to add items in reverse order for the desired effect
builders := []endomorphism.Endomorphism[Settings]{}
if isAccessibilityMode {
builders = append(builders, withFontSize(18)) // Will be applied last (overrides)
builders = append(builders, withAnimations(false))
}
if isDarkMode {
builders = append(builders, withTheme("dark"))
} else {
builders = append(builders, withTheme("light"))
}
builders = append(builders, withAutoSave(true))
builders = append(builders, withFontSize(14)) // Will be applied first
settingsBuilder := M.ConcatAll(endomorphism.Monoid[Settings]())(builders)
settings := endomorphism.Build(settingsBuilder)
fmt.Printf("Theme: %s\n", settings.Theme)
fmt.Printf("FontSize: %d\n", settings.FontSize)
fmt.Printf("AutoSave: %v\n", settings.AutoSave)
fmt.Printf("Animations: %v\n", settings.Animations)
// Output:
// Theme: dark
// FontSize: 18
// AutoSave: true
// Animations: false
}

View File

@@ -37,7 +37,7 @@
//
// // Define some endomorphisms
// double := N.Mul(2)
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
//
// // Compose them (RIGHT-TO-LEFT execution)
// composed := endomorphism.Compose(double, increment)
@@ -63,8 +63,8 @@
// // Combine multiple endomorphisms (RIGHT-TO-LEFT execution)
// combined := M.ConcatAll(monoid)(
// N.Mul(2), // applied third
// func(x int) int { return x + 1 }, // applied second
// func(x int) int { return x * 3 }, // applied first
// N.Add(1), // applied second
// N.Mul(3), // applied first
// )
// result := combined(5) // (5 * 3) = 15, (15 + 1) = 16, (16 * 2) = 32
//
@@ -75,7 +75,7 @@
//
// // Chain allows sequencing of endomorphisms (LEFT-TO-RIGHT)
// f := N.Mul(2)
// g := func(x int) int { return x + 1 }
// g := N.Add(1)
// chained := endomorphism.MonadChain(f, g) // f first, then g
// result := chained(5) // (5 * 2) + 1 = 11
//
@@ -84,7 +84,7 @@
// The key difference between Compose and Chain/MonadChain is execution order:
//
// double := N.Mul(2)
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
//
// // Compose: RIGHT-TO-LEFT (mathematical composition)
// composed := endomorphism.Compose(double, increment)

View File

@@ -38,10 +38,10 @@ import (
// Example:
//
// double := N.Mul(2)
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
// result := endomorphism.MonadAp(double, increment) // Composes: double ∘ increment
// // result(5) = double(increment(5)) = double(6) = 12
func MonadAp[A any](fab Endomorphism[A], fa Endomorphism[A]) Endomorphism[A] {
func MonadAp[A any](fab, fa Endomorphism[A]) Endomorphism[A] {
return MonadCompose(fab, fa)
}
@@ -62,7 +62,7 @@ func MonadAp[A any](fab Endomorphism[A], fa Endomorphism[A]) Endomorphism[A] {
//
// Example:
//
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
// applyIncrement := endomorphism.Ap(increment)
// double := N.Mul(2)
// composed := applyIncrement(double) // double ∘ increment
@@ -92,7 +92,7 @@ func Ap[A any](fa Endomorphism[A]) Operator[A] {
// Example:
//
// double := N.Mul(2)
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
//
// // MonadCompose executes RIGHT-TO-LEFT: increment first, then double
// composed := endomorphism.MonadCompose(double, increment)
@@ -124,7 +124,7 @@ func MonadCompose[A any](f, g Endomorphism[A]) Endomorphism[A] {
// Example:
//
// double := N.Mul(2)
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
// mapped := endomorphism.MonadMap(double, increment)
// // mapped(5) = double(increment(5)) = double(6) = 12
func MonadMap[A any](f, g Endomorphism[A]) Endomorphism[A] {
@@ -151,7 +151,7 @@ func MonadMap[A any](f, g Endomorphism[A]) Endomorphism[A] {
//
// Example:
//
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
// composeWithIncrement := endomorphism.Compose(increment)
// double := N.Mul(2)
//
@@ -188,7 +188,7 @@ func Compose[A any](g Endomorphism[A]) Operator[A] {
//
// double := N.Mul(2)
// mapDouble := endomorphism.Map(double)
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
// mapped := mapDouble(increment)
// // mapped(5) = double(increment(5)) = double(6) = 12
func Map[A any](f Endomorphism[A]) Operator[A] {
@@ -216,7 +216,7 @@ func Map[A any](f Endomorphism[A]) Operator[A] {
// Example:
//
// double := N.Mul(2)
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
//
// // MonadChain executes LEFT-TO-RIGHT: double first, then increment
// chained := endomorphism.MonadChain(double, increment)
@@ -225,7 +225,7 @@ func Map[A any](f Endomorphism[A]) Operator[A] {
// // Compare with MonadCompose which executes RIGHT-TO-LEFT:
// composed := endomorphism.MonadCompose(increment, double)
// result2 := composed(5) // (5 * 2) + 1 = 11 (same result, different parameter order)
func MonadChain[A any](ma Endomorphism[A], f Endomorphism[A]) Endomorphism[A] {
func MonadChain[A any](ma, f Endomorphism[A]) Endomorphism[A] {
return function.Flow2(ma, f)
}
@@ -247,7 +247,7 @@ func MonadChain[A any](ma Endomorphism[A], f Endomorphism[A]) Endomorphism[A] {
// log := func(x int) int { fmt.Println(x); return x }
// chained := endomorphism.MonadChainFirst(double, log)
// result := chained(5) // Prints 10, returns 10
func MonadChainFirst[A any](ma Endomorphism[A], f Endomorphism[A]) Endomorphism[A] {
func MonadChainFirst[A any](ma, f Endomorphism[A]) Endomorphism[A] {
return func(a A) A {
result := ma(a)
f(result) // Apply f for its effect
@@ -294,7 +294,7 @@ func ChainFirst[A any](f Endomorphism[A]) Operator[A] {
//
// Example:
//
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
// chainWithIncrement := endomorphism.Chain(increment)
// double := N.Mul(2)
//

View File

@@ -206,7 +206,7 @@ func TestCompose(t *testing.T) {
// TestMonadComposeVsCompose demonstrates the relationship between MonadCompose and Compose
func TestMonadComposeVsCompose(t *testing.T) {
double := N.Mul(2)
increment := func(x int) int { return x + 1 }
increment := N.Add(1)
// MonadCompose takes both functions at once
monadComposed := MonadCompose(double, increment)
@@ -458,7 +458,7 @@ func BenchmarkCompose(b *testing.B) {
// TestComposeVsChain demonstrates the key difference between Compose and Chain
func TestComposeVsChain(t *testing.T) {
double := N.Mul(2)
increment := func(x int) int { return x + 1 }
increment := N.Add(1)
// Compose executes RIGHT-TO-LEFT
// Compose(double, increment) means: increment first, then double
@@ -722,3 +722,352 @@ func TestChainFirst(t *testing.T) {
// But side effect should have been executed with double's result
assert.Equal(t, 10, sideEffect, "ChainFirst should execute second function for effect")
}
// TestBuild tests the Build function
func TestBuild(t *testing.T) {
t.Run("build with single transformation", func(t *testing.T) {
// Build applies endomorphism to zero value
result := Build(double)
assert.Equal(t, 0, result, "Build(double) on zero value should be 0")
})
t.Run("build with composed transformations", func(t *testing.T) {
// Create a builder that starts from zero and applies transformations
builder := M.ConcatAll(Monoid[int]())([]Endomorphism[int]{
N.Add(10),
N.Mul(2),
N.Add(5),
})
result := Build(builder)
// RIGHT-TO-LEFT: 0 + 5 = 5, 5 * 2 = 10, 10 + 10 = 20
assert.Equal(t, 20, result, "Build should apply composed transformations to zero value")
})
t.Run("build with identity", func(t *testing.T) {
result := Build(Identity[int]())
assert.Equal(t, 0, result, "Build(identity) should return zero value")
})
t.Run("build string from empty", func(t *testing.T) {
builder := M.ConcatAll(Monoid[string]())([]Endomorphism[string]{
func(s string) string { return s + "Hello" },
func(s string) string { return s + " " },
func(s string) string { return s + "World" },
})
result := Build(builder)
// RIGHT-TO-LEFT: "" + "World" = "World", "World" + " " = "World ", "World " + "Hello" = "World Hello"
assert.Equal(t, "World Hello", result, "Build should work with strings")
})
t.Run("build struct with builder pattern", func(t *testing.T) {
type Config struct {
Host string
Port int
}
withHost := func(host string) Endomorphism[Config] {
return func(c Config) Config {
c.Host = host
return c
}
}
withPort := func(port int) Endomorphism[Config] {
return func(c Config) Config {
c.Port = port
return c
}
}
builder := M.ConcatAll(Monoid[Config]())([]Endomorphism[Config]{
withHost("localhost"),
withPort(8080),
})
result := Build(builder)
assert.Equal(t, "localhost", result.Host, "Build should set Host")
assert.Equal(t, 8080, result.Port, "Build should set Port")
})
t.Run("build slice with operations", func(t *testing.T) {
type IntSlice []int
appendValue := func(v int) Endomorphism[IntSlice] {
return func(s IntSlice) IntSlice {
return append(s, v)
}
}
builder := M.ConcatAll(Monoid[IntSlice]())([]Endomorphism[IntSlice]{
appendValue(1),
appendValue(2),
appendValue(3),
})
result := Build(builder)
// RIGHT-TO-LEFT: append 3, append 2, append 1
assert.Equal(t, IntSlice{3, 2, 1}, result, "Build should construct slice")
})
}
// TestBuildAsBuilderPattern demonstrates using Build as a builder pattern
func TestBuildAsBuilderPattern(t *testing.T) {
type Person struct {
Name string
Age int
Email string
Active bool
}
// Define builder functions
withName := func(name string) Endomorphism[Person] {
return func(p Person) Person {
p.Name = name
return p
}
}
withAge := func(age int) Endomorphism[Person] {
return func(p Person) Person {
p.Age = age
return p
}
}
withEmail := func(email string) Endomorphism[Person] {
return func(p Person) Person {
p.Email = email
return p
}
}
withActive := func(active bool) Endomorphism[Person] {
return func(p Person) Person {
p.Active = active
return p
}
}
// Build a person using the builder pattern
personBuilder := M.ConcatAll(Monoid[Person]())([]Endomorphism[Person]{
withName("Alice"),
withAge(30),
withEmail("alice@example.com"),
withActive(true),
})
person := Build(personBuilder)
assert.Equal(t, "Alice", person.Name)
assert.Equal(t, 30, person.Age)
assert.Equal(t, "alice@example.com", person.Email)
assert.True(t, person.Active)
}
// TestConcatAll tests the ConcatAll function
func TestConcatAll(t *testing.T) {
t.Run("concat all with multiple endomorphisms", func(t *testing.T) {
// ConcatAll executes RIGHT-TO-LEFT
combined := ConcatAll([]Endomorphism[int]{double, increment, square})
result := combined(5)
// RIGHT-TO-LEFT: square(5) = 25, increment(25) = 26, double(26) = 52
assert.Equal(t, 52, result, "ConcatAll should execute right-to-left")
})
t.Run("concat all with empty slice", func(t *testing.T) {
// Empty slice should return identity
identity := ConcatAll([]Endomorphism[int]{})
result := identity(42)
assert.Equal(t, 42, result, "ConcatAll with empty slice should return identity")
})
t.Run("concat all with single endomorphism", func(t *testing.T) {
combined := ConcatAll([]Endomorphism[int]{double})
result := combined(5)
assert.Equal(t, 10, result, "ConcatAll with single endomorphism should apply it")
})
t.Run("concat all with two endomorphisms", func(t *testing.T) {
// RIGHT-TO-LEFT: increment first, then double
combined := ConcatAll([]Endomorphism[int]{double, increment})
result := combined(5)
assert.Equal(t, 12, result, "ConcatAll should execute right-to-left: (5 + 1) * 2 = 12")
})
t.Run("concat all with strings", func(t *testing.T) {
appendHello := func(s string) string { return s + "Hello" }
appendSpace := func(s string) string { return s + " " }
appendWorld := func(s string) string { return s + "World" }
// RIGHT-TO-LEFT execution
combined := ConcatAll([]Endomorphism[string]{appendHello, appendSpace, appendWorld})
result := combined("")
// RIGHT-TO-LEFT: "" + "World" = "World", "World" + " " = "World ", "World " + "Hello" = "World Hello"
assert.Equal(t, "World Hello", result, "ConcatAll should work with strings")
})
t.Run("concat all for building structs", func(t *testing.T) {
type Config struct {
Host string
Port int
}
withHost := func(host string) Endomorphism[Config] {
return func(c Config) Config {
c.Host = host
return c
}
}
withPort := func(port int) Endomorphism[Config] {
return func(c Config) Config {
c.Port = port
return c
}
}
combined := ConcatAll([]Endomorphism[Config]{
withHost("localhost"),
withPort(8080),
})
result := combined(Config{})
assert.Equal(t, "localhost", result.Host)
assert.Equal(t, 8080, result.Port)
})
t.Run("concat all is equivalent to monoid ConcatAll", func(t *testing.T) {
endos := []Endomorphism[int]{double, increment, square}
result1 := ConcatAll(endos)(5)
result2 := M.ConcatAll(Monoid[int]())(endos)(5)
assert.Equal(t, result1, result2, "ConcatAll should be equivalent to M.ConcatAll(Monoid())")
})
}
// TestReduce tests the Reduce function
func TestReduce(t *testing.T) {
t.Run("reduce with multiple endomorphisms", func(t *testing.T) {
// Reduce executes LEFT-TO-RIGHT starting from zero value
result := Reduce([]Endomorphism[int]{double, increment, square})
// LEFT-TO-RIGHT: 0 -> double(0) = 0 -> increment(0) = 1 -> square(1) = 1
assert.Equal(t, 1, result, "Reduce should execute left-to-right from zero value")
})
t.Run("reduce with empty slice", func(t *testing.T) {
// Empty slice should return zero value
result := Reduce([]Endomorphism[int]{})
assert.Equal(t, 0, result, "Reduce with empty slice should return zero value")
})
t.Run("reduce with single endomorphism", func(t *testing.T) {
addTen := N.Add(10)
result := Reduce([]Endomorphism[int]{addTen})
// 0 + 10 = 10
assert.Equal(t, 10, result, "Reduce with single endomorphism should apply it to zero")
})
t.Run("reduce with sequential transformations", func(t *testing.T) {
addTen := N.Add(10)
// LEFT-TO-RIGHT: 0 -> addTen(0) = 10 -> double(10) = 20 -> increment(20) = 21
result := Reduce([]Endomorphism[int]{addTen, double, increment})
assert.Equal(t, 21, result, "Reduce should apply transformations left-to-right")
})
t.Run("reduce with strings", func(t *testing.T) {
appendHello := func(s string) string { return s + "Hello" }
appendSpace := func(s string) string { return s + " " }
appendWorld := func(s string) string { return s + "World" }
// LEFT-TO-RIGHT execution
result := Reduce([]Endomorphism[string]{appendHello, appendSpace, appendWorld})
// "" -> "Hello" -> "Hello " -> "Hello World"
assert.Equal(t, "Hello World", result, "Reduce should work with strings left-to-right")
})
t.Run("reduce for building structs", func(t *testing.T) {
type Settings struct {
Theme string
FontSize int
}
withTheme := func(theme string) Endomorphism[Settings] {
return func(s Settings) Settings {
s.Theme = theme
return s
}
}
withFontSize := func(size int) Endomorphism[Settings] {
return func(s Settings) Settings {
s.FontSize = size
return s
}
}
// LEFT-TO-RIGHT application
result := Reduce([]Endomorphism[Settings]{
withTheme("dark"),
withFontSize(14),
})
assert.Equal(t, "dark", result.Theme)
assert.Equal(t, 14, result.FontSize)
})
t.Run("reduce is equivalent to Build(ConcatAll(reverse))", func(t *testing.T) {
addTen := N.Add(10)
endos := []Endomorphism[int]{addTen, double, increment}
// Reduce applies left-to-right
result1 := Reduce(endos)
// Reverse and use ConcatAll (which is right-to-left)
reversed := []Endomorphism[int]{increment, double, addTen}
result2 := Build(ConcatAll(reversed))
assert.Equal(t, result1, result2, "Reduce should be equivalent to Build(ConcatAll(reverse))")
})
}
// TestConcatAllVsReduce demonstrates the difference between ConcatAll and Reduce
func TestConcatAllVsReduce(t *testing.T) {
addTen := N.Add(10)
endos := []Endomorphism[int]{addTen, double, increment}
// ConcatAll: RIGHT-TO-LEFT composition, returns endomorphism
concatResult := ConcatAll(endos)(5)
// 5 -> increment(5) = 6 -> double(6) = 12 -> addTen(12) = 22
// Reduce: LEFT-TO-RIGHT application, returns value from zero
reduceResult := Reduce(endos)
// 0 -> addTen(0) = 10 -> double(10) = 20 -> increment(20) = 21
assert.NotEqual(t, concatResult, reduceResult, "ConcatAll and Reduce should produce different results")
assert.Equal(t, 22, concatResult, "ConcatAll should execute right-to-left on input value")
assert.Equal(t, 21, reduceResult, "Reduce should execute left-to-right from zero value")
}
// TestReduceWithBuild demonstrates using Reduce vs Build with ConcatAll
func TestReduceWithBuild(t *testing.T) {
addFive := N.Add(5)
multiplyByThree := N.Mul(3)
endos := []Endomorphism[int]{addFive, multiplyByThree}
// Reduce: LEFT-TO-RIGHT from zero
reduceResult := Reduce(endos)
// 0 -> addFive(0) = 5 -> multiplyByThree(5) = 15
assert.Equal(t, 15, reduceResult)
// Build with ConcatAll: RIGHT-TO-LEFT from zero
buildResult := Build(ConcatAll(endos))
// 0 -> multiplyByThree(0) = 0 -> addFive(0) = 5
assert.Equal(t, 5, buildResult)
assert.NotEqual(t, reduceResult, buildResult, "Reduce and Build(ConcatAll) produce different results due to execution order")
}

View File

@@ -1,3 +1,18 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package endomorphism
import (
@@ -5,6 +20,63 @@ import (
S "github.com/IBM/fp-go/v2/semigroup"
)
// FromSemigroup converts a semigroup into a Kleisli arrow for endomorphisms.
//
// This function takes a semigroup and returns a Kleisli arrow that, when given
// a value of type A, produces an endomorphism that concatenates that value with
// other values using the semigroup's Concat operation.
//
// The resulting Kleisli arrow has the signature: func(A) Endomorphism[A]
// When called with a value 'x', it returns an endomorphism that concatenates
// 'x' with its input using the semigroup's binary operation.
//
// # Data Last Principle
//
// FromSemigroup follows the "data last" principle by using function.Bind2of2,
// which binds the second parameter of the semigroup's Concat operation.
// This means that for a semigroup with Concat(a, b), calling FromSemigroup(s)(x)
// creates an endomorphism that computes Concat(input, x), where the input data
// comes first and the bound value 'x' comes last.
//
// For example, with string concatenation:
// - Semigroup.Concat("Hello", "World") = "HelloWorld"
// - FromSemigroup(semigroup)("World") creates: func(input) = Concat(input, "World")
// - Applying it: endomorphism("Hello") = Concat("Hello", "World") = "HelloWorld"
//
// This is particularly useful for creating endomorphisms from associative operations
// like string concatenation, number addition, list concatenation, etc.
//
// Parameters:
// - s: A semigroup providing the Concat operation for type A
//
// Returns:
// - A Kleisli arrow that converts values of type A into endomorphisms
//
// Example:
//
// import (
// "github.com/IBM/fp-go/v2/endomorphism"
// "github.com/IBM/fp-go/v2/semigroup"
// )
//
// // Create a semigroup for integer addition
// addSemigroup := semigroup.MakeSemigroup(func(a, b int) int {
// return a + b
// })
//
// // Convert it to a Kleisli arrow
// addKleisli := endomorphism.FromSemigroup(addSemigroup)
//
// // Use the Kleisli arrow to create an endomorphism that adds 5
// // This follows "data last": the input data comes first, 5 comes last
// addFive := addKleisli(5)
//
// // Apply the endomorphism: Concat(10, 5) = 10 + 5 = 15
// result := addFive(10) // result is 15
//
// The function uses function.Bind2of2 to partially apply the semigroup's Concat
// operation, effectively currying it to create the desired Kleisli arrow while
// maintaining the "data last" principle.
func FromSemigroup[A any](s S.Semigroup[A]) Kleisli[A] {
return function.Bind2of2(s.Concat)
}

View File

@@ -0,0 +1,439 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package endomorphism
import (
"testing"
S "github.com/IBM/fp-go/v2/semigroup"
"github.com/stretchr/testify/assert"
)
// TestFromSemigroup tests the FromSemigroup function with various semigroups
func TestFromSemigroup(t *testing.T) {
t.Run("integer addition semigroup", func(t *testing.T) {
// Create a semigroup for integer addition
addSemigroup := S.MakeSemigroup(func(a, b int) int {
return a + b
})
// Convert to Kleisli arrow
addKleisli := FromSemigroup(addSemigroup)
// Create an endomorphism that adds 5
addFive := addKleisli(5)
// Test the endomorphism
assert.Equal(t, 15, addFive(10), "addFive(10) should equal 15")
assert.Equal(t, 5, addFive(0), "addFive(0) should equal 5")
assert.Equal(t, -5, addFive(-10), "addFive(-10) should equal -5")
})
t.Run("integer multiplication semigroup", func(t *testing.T) {
// Create a semigroup for integer multiplication
mulSemigroup := S.MakeSemigroup(func(a, b int) int {
return a * b
})
// Convert to Kleisli arrow
mulKleisli := FromSemigroup(mulSemigroup)
// Create an endomorphism that multiplies by 3
multiplyByThree := mulKleisli(3)
// Test the endomorphism
assert.Equal(t, 15, multiplyByThree(5), "multiplyByThree(5) should equal 15")
assert.Equal(t, 0, multiplyByThree(0), "multiplyByThree(0) should equal 0")
assert.Equal(t, -9, multiplyByThree(-3), "multiplyByThree(-3) should equal -9")
})
t.Run("string concatenation semigroup", func(t *testing.T) {
// Create a semigroup for string concatenation
concatSemigroup := S.MakeSemigroup(func(a, b string) string {
return a + b
})
// Convert to Kleisli arrow
concatKleisli := FromSemigroup(concatSemigroup)
// Create an endomorphism that appends "Hello, " (input is on the left)
appendHello := concatKleisli("Hello, ")
// Test the endomorphism - input is concatenated on the left, "Hello, " on the right
assert.Equal(t, "WorldHello, ", appendHello("World"), "appendHello('World') should equal 'WorldHello, '")
assert.Equal(t, "Hello, ", appendHello(""), "appendHello('') should equal 'Hello, '")
assert.Equal(t, "GoHello, ", appendHello("Go"), "appendHello('Go') should equal 'GoHello, '")
})
t.Run("slice concatenation semigroup", func(t *testing.T) {
// Create a semigroup for slice concatenation
sliceSemigroup := S.MakeSemigroup(func(a, b []int) []int {
result := make([]int, len(a)+len(b))
copy(result, a)
copy(result[len(a):], b)
return result
})
// Convert to Kleisli arrow
sliceKleisli := FromSemigroup(sliceSemigroup)
// Create an endomorphism that appends [1, 2] (input is on the left)
appendOneTwo := sliceKleisli([]int{1, 2})
// Test the endomorphism - input is concatenated on the left, [1,2] on the right
result1 := appendOneTwo([]int{3, 4, 5})
assert.Equal(t, []int{3, 4, 5, 1, 2}, result1, "appendOneTwo([3,4,5]) should equal [3,4,5,1,2]")
result2 := appendOneTwo([]int{})
assert.Equal(t, []int{1, 2}, result2, "appendOneTwo([]) should equal [1,2]")
result3 := appendOneTwo([]int{10})
assert.Equal(t, []int{10, 1, 2}, result3, "appendOneTwo([10]) should equal [10,1,2]")
})
t.Run("max semigroup", func(t *testing.T) {
// Create a semigroup for max operation
maxSemigroup := S.MakeSemigroup(func(a, b int) int {
if a > b {
return a
}
return b
})
// Convert to Kleisli arrow
maxKleisli := FromSemigroup(maxSemigroup)
// Create an endomorphism that takes max with 10
maxWithTen := maxKleisli(10)
// Test the endomorphism
assert.Equal(t, 15, maxWithTen(15), "maxWithTen(15) should equal 15")
assert.Equal(t, 10, maxWithTen(5), "maxWithTen(5) should equal 10")
assert.Equal(t, 10, maxWithTen(10), "maxWithTen(10) should equal 10")
assert.Equal(t, 10, maxWithTen(-5), "maxWithTen(-5) should equal 10")
})
t.Run("min semigroup", func(t *testing.T) {
// Create a semigroup for min operation
minSemigroup := S.MakeSemigroup(func(a, b int) int {
if a < b {
return a
}
return b
})
// Convert to Kleisli arrow
minKleisli := FromSemigroup(minSemigroup)
// Create an endomorphism that takes min with 10
minWithTen := minKleisli(10)
// Test the endomorphism
assert.Equal(t, 5, minWithTen(5), "minWithTen(5) should equal 5")
assert.Equal(t, 10, minWithTen(15), "minWithTen(15) should equal 10")
assert.Equal(t, 10, minWithTen(10), "minWithTen(10) should equal 10")
assert.Equal(t, -5, minWithTen(-5), "minWithTen(-5) should equal -5")
})
}
// TestFromSemigroupComposition tests that endomorphisms created from semigroups can be composed
func TestFromSemigroupComposition(t *testing.T) {
t.Run("compose addition endomorphisms", func(t *testing.T) {
// Create a semigroup for integer addition
addSemigroup := S.MakeSemigroup(func(a, b int) int {
return a + b
})
addKleisli := FromSemigroup(addSemigroup)
// Create two endomorphisms
addFive := addKleisli(5)
addTen := addKleisli(10)
// Compose them (RIGHT-TO-LEFT execution)
composed := MonadCompose(addFive, addTen)
// Test composition: addTen first, then addFive
result := composed(3) // 3 + 10 = 13, then 13 + 5 = 18
assert.Equal(t, 18, result, "composed addition should work correctly")
})
t.Run("compose string endomorphisms", func(t *testing.T) {
// Create a semigroup for string concatenation
concatSemigroup := S.MakeSemigroup(func(a, b string) string {
return a + b
})
concatKleisli := FromSemigroup(concatSemigroup)
// Create two endomorphisms
appendHello := concatKleisli("Hello, ")
appendExclamation := concatKleisli("!")
// Compose them (RIGHT-TO-LEFT execution)
composed := MonadCompose(appendHello, appendExclamation)
// Test composition: appendExclamation first, then appendHello
// "World" + "!" = "World!", then "World!" + "Hello, " = "World!Hello, "
result := composed("World")
assert.Equal(t, "World!Hello, ", result, "composed string operations should work correctly")
})
}
// TestFromSemigroupWithMonoid tests using FromSemigroup-created endomorphisms with monoid operations
func TestFromSemigroupWithMonoid(t *testing.T) {
t.Run("monoid concat with addition endomorphisms", func(t *testing.T) {
// Create a semigroup for integer addition
addSemigroup := S.MakeSemigroup(func(a, b int) int {
return a + b
})
addKleisli := FromSemigroup(addSemigroup)
// Create multiple endomorphisms
addOne := addKleisli(1)
addTwo := addKleisli(2)
addThree := addKleisli(3)
// Use monoid to combine them
monoid := Monoid[int]()
combined := monoid.Concat(monoid.Concat(addOne, addTwo), addThree)
// Test: RIGHT-TO-LEFT execution: addThree, then addTwo, then addOne
result := combined(10) // 10 + 3 = 13, 13 + 2 = 15, 15 + 1 = 16
assert.Equal(t, 16, result, "monoid combination should work correctly")
})
}
// TestFromSemigroupAssociativity tests that the semigroup associativity is preserved
func TestFromSemigroupAssociativity(t *testing.T) {
t.Run("addition associativity", func(t *testing.T) {
// Create a semigroup for integer addition
addSemigroup := S.MakeSemigroup(func(a, b int) int {
return a + b
})
addKleisli := FromSemigroup(addSemigroup)
// Create three endomorphisms
addTwo := addKleisli(2)
addThree := addKleisli(3)
addFive := addKleisli(5)
// Test associativity: (a . b) . c = a . (b . c)
left := MonadCompose(MonadCompose(addTwo, addThree), addFive)
right := MonadCompose(addTwo, MonadCompose(addThree, addFive))
testValue := 10
assert.Equal(t, left(testValue), right(testValue), "composition should be associative")
// Both should equal: 10 + 5 + 3 + 2 = 20
assert.Equal(t, 20, left(testValue), "left composition should equal 20")
assert.Equal(t, 20, right(testValue), "right composition should equal 20")
})
t.Run("string concatenation associativity", func(t *testing.T) {
// Create a semigroup for string concatenation
concatSemigroup := S.MakeSemigroup(func(a, b string) string {
return a + b
})
concatKleisli := FromSemigroup(concatSemigroup)
// Create three endomorphisms
appendA := concatKleisli("A")
appendB := concatKleisli("B")
appendC := concatKleisli("C")
// Test associativity: (a . b) . c = a . (b . c)
left := MonadCompose(MonadCompose(appendA, appendB), appendC)
right := MonadCompose(appendA, MonadCompose(appendB, appendC))
testValue := "X"
assert.Equal(t, left(testValue), right(testValue), "string composition should be associative")
// Both should equal: "X" + "C" + "B" + "A" = "XCBA" (RIGHT-TO-LEFT composition)
assert.Equal(t, "XCBA", left(testValue), "left composition should equal 'XCBA'")
assert.Equal(t, "XCBA", right(testValue), "right composition should equal 'XCBA'")
})
}
// TestFromSemigroupEdgeCases tests edge cases and boundary conditions
func TestFromSemigroupEdgeCases(t *testing.T) {
t.Run("zero values", func(t *testing.T) {
// Test with addition and zero
addSemigroup := S.MakeSemigroup(func(a, b int) int {
return a + b
})
addKleisli := FromSemigroup(addSemigroup)
addZero := addKleisli(0)
assert.Equal(t, 5, addZero(5), "adding zero should not change the value")
assert.Equal(t, 0, addZero(0), "adding zero to zero should be zero")
assert.Equal(t, -3, addZero(-3), "adding zero to negative should not change")
})
t.Run("empty string", func(t *testing.T) {
// Test with string concatenation and empty string
concatSemigroup := S.MakeSemigroup(func(a, b string) string {
return a + b
})
concatKleisli := FromSemigroup(concatSemigroup)
appendEmpty := concatKleisli("")
assert.Equal(t, "hello", appendEmpty("hello"), "appending the empty string should not change the value")
assert.Equal(t, "", appendEmpty(""), "appending empty to empty should be empty")
})
t.Run("empty slice", func(t *testing.T) {
// Test with slice concatenation and empty slice
sliceSemigroup := S.MakeSemigroup(func(a, b []int) []int {
result := make([]int, len(a)+len(b))
copy(result, a)
copy(result[len(a):], b)
return result
})
sliceKleisli := FromSemigroup(sliceSemigroup)
appendEmpty := sliceKleisli([]int{})
result := appendEmpty([]int{1, 2, 3})
assert.Equal(t, []int{1, 2, 3}, result, "appending an empty slice should not change the value")
emptyResult := appendEmpty([]int{})
assert.Equal(t, []int{}, emptyResult, "appending empty to empty should be empty")
})
}
// TestFromSemigroupDataLastPrinciple explicitly tests that FromSemigroup follows the "data last" principle
func TestFromSemigroupDataLastPrinciple(t *testing.T) {
t.Run("data last with string concatenation", func(t *testing.T) {
// Create a semigroup for string concatenation
// Concat(a, b) = a + b
concatSemigroup := S.MakeSemigroup(func(a, b string) string {
return a + b
})
// FromSemigroup uses Bind2of2, which binds the second parameter
// So FromSemigroup(s)(x) creates: func(input) = Concat(input, x)
// This is "data last" - the input data comes first, bound value comes last
kleisli := FromSemigroup(concatSemigroup)
// Bind "World" as the second parameter
appendWorld := kleisli("World")
// When we call appendWorld("Hello"), it computes Concat("Hello", "World")
// The input "Hello" is the first parameter (data), "World" is the second (bound value)
result := appendWorld("Hello")
assert.Equal(t, "HelloWorld", result, "Data last: Concat(input='Hello', bound='World') = 'HelloWorld'")
// Verify with different input
result2 := appendWorld("Goodbye")
assert.Equal(t, "GoodbyeWorld", result2, "Data last: Concat(input='Goodbye', bound='World') = 'GoodbyeWorld'")
})
t.Run("data last with integer addition", func(t *testing.T) {
// Create a semigroup for integer addition
// Concat(a, b) = a + b
addSemigroup := S.MakeSemigroup(func(a, b int) int {
return a + b
})
// FromSemigroup binds the second parameter
// So FromSemigroup(s)(5) creates: func(input) = Concat(input, 5) = input + 5
kleisli := FromSemigroup(addSemigroup)
// Bind 5 as the second parameter
addFive := kleisli(5)
// When we call addFive(10), it computes Concat(10, 5) = 10 + 5 = 15
// The input 10 is the first parameter (data), 5 is the second (bound value)
result := addFive(10)
assert.Equal(t, 15, result, "Data last: Concat(input=10, bound=5) = 15")
})
t.Run("data last with non-commutative operation", func(t *testing.T) {
// Create a semigroup for a non-commutative operation to clearly show order
// Concat(a, b) = a - b (subtraction is not commutative)
subSemigroup := S.MakeSemigroup(func(a, b int) int {
return a - b
})
// FromSemigroup binds the second parameter
// So FromSemigroup(s)(5) creates: func(input) = Concat(input, 5) = input - 5
kleisli := FromSemigroup(subSemigroup)
// Bind 5 as the second parameter
subtractFive := kleisli(5)
// When we call subtractFive(10), it computes Concat(10, 5) = 10 - 5 = 5
// The input 10 is the first parameter (data), 5 is the second (bound value)
result := subtractFive(10)
assert.Equal(t, 5, result, "Data last: Concat(input=10, bound=5) = 10 - 5 = 5")
// If it were "data first" (binding first parameter), we would get:
// Concat(5, 10) = 5 - 10 = -5, which is NOT what we get
assert.NotEqual(t, -5, result, "Not data first: result is NOT Concat(bound=5, input=10) = 5 - 10 = -5")
})
t.Run("data last with list concatenation", func(t *testing.T) {
// Create a semigroup for list concatenation
// Concat(a, b) = a ++ b
listSemigroup := S.MakeSemigroup(func(a, b []int) []int {
result := make([]int, len(a)+len(b))
copy(result, a)
copy(result[len(a):], b)
return result
})
// FromSemigroup binds the second parameter
// So FromSemigroup(s)([3,4]) creates: func(input) = Concat(input, [3,4])
kleisli := FromSemigroup(listSemigroup)
// Bind [3, 4] as the second parameter
appendThreeFour := kleisli([]int{3, 4})
// When we call appendThreeFour([1,2]), it computes Concat([1,2], [3,4]) = [1,2,3,4]
// The input [1,2] is the first parameter (data), [3,4] is the second (bound value)
result := appendThreeFour([]int{1, 2})
assert.Equal(t, []int{1, 2, 3, 4}, result, "Data last: Concat(input=[1,2], bound=[3,4]) = [1,2,3,4]")
})
}
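// A minimal usage sketch (not part of the original test file; it assumes the pipe helper
// F.Pipe1 from the function package and reuses the concatSemigroup defined above): because
// FromSemigroup binds its argument as the second parameter of Concat, the partially applied
// endomorphism drops straight into a data-last pipeline.
//
// appendWorld := FromSemigroup(concatSemigroup)("World")
// greeting := F.Pipe1("Hello", appendWorld) // Concat("Hello", "World") == "HelloWorld"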
// BenchmarkFromSemigroup benchmarks the FromSemigroup function
func BenchmarkFromSemigroup(b *testing.B) {
addSemigroup := S.MakeSemigroup(func(a, b int) int {
return a + b
})
addKleisli := FromSemigroup(addSemigroup)
addFive := addKleisli(5)
b.ResetTimer()
for b.Loop() {
_ = addFive(10)
}
}
// BenchmarkFromSemigroupComposition benchmarks composed endomorphisms from semigroups
func BenchmarkFromSemigroupComposition(b *testing.B) {
addSemigroup := S.MakeSemigroup(func(a, b int) int {
return a + b
})
addKleisli := FromSemigroup(addSemigroup)
addFive := addKleisli(5)
addTen := addKleisli(10)
composed := MonadCompose(addFive, addTen)
b.ResetTimer()
for b.Loop() {
_ = composed(3)
}
}


@@ -104,7 +104,7 @@ func Identity[A any]() Endomorphism[A] {
//
// sg := endomorphism.Semigroup[int]()
// double := N.Mul(2)
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
//
// // Combine using the semigroup (RIGHT-TO-LEFT execution)
// combined := sg.Concat(double, increment)
@@ -140,7 +140,7 @@ func Semigroup[A any]() S.Semigroup[Endomorphism[A]] {
//
// monoid := endomorphism.Monoid[int]()
// double := N.Mul(2)
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
// square := func(x int) int { return x * x }
//
// // Combine multiple endomorphisms (RIGHT-TO-LEFT execution)


@@ -30,7 +30,7 @@ type (
//
// // Simple endomorphisms on integers
// double := N.Mul(2)
// increment := func(x int) int { return x + 1 }
// increment := N.Add(1)
//
// // Both are endomorphisms of type Endomorphism[int]
// var f endomorphism.Endomorphism[int] = double


@@ -72,9 +72,7 @@ func TestFromStrictEquals(t *testing.T) {
func TestFromEquals(t *testing.T) {
t.Run("case-insensitive string equality", func(t *testing.T) {
caseInsensitiveEq := FromEquals(func(a, b string) bool {
return strings.EqualFold(a, b)
})
caseInsensitiveEq := FromEquals(strings.EqualFold)
assert.True(t, caseInsensitiveEq.Equals("hello", "HELLO"))
assert.True(t, caseInsensitiveEq.Equals("Hello", "hello"))
@@ -243,9 +241,7 @@ func TestContramap(t *testing.T) {
})
t.Run("case-insensitive name comparison", func(t *testing.T) {
caseInsensitiveEq := FromEquals(func(a, b string) bool {
return strings.EqualFold(a, b)
})
caseInsensitiveEq := FromEquals(strings.EqualFold)
personEqByNameCI := Contramap(func(p Person) string {
return p.Name


@@ -53,7 +53,10 @@ func Identity[A any](a A) A {
//
// getMessage := Constant("Hello")
// msg := getMessage() // "Hello"
//
//go:inline
func Constant[A any](a A) func() A {
//go:inline
return func() A {
return a
}
@@ -81,7 +84,10 @@ func Constant[A any](a A) func() A {
//
// defaultName := Constant1[int, string]("Unknown")
// name := defaultName(42) // "Unknown"
//
//go:inline
func Constant1[B, A any](a A) func(B) A {
//go:inline
return func(_ B) A {
return a
}
@@ -107,7 +113,10 @@ func Constant1[B, A any](a A) func(B) A {
//
// alwaysTrue := Constant2[int, string, bool](true)
// result := alwaysTrue(42, "test") // true
//
//go:inline
func Constant2[B, C, A any](a A) func(B, C) A {
//go:inline
return func(_ B, _ C) A {
return a
}
@@ -128,6 +137,8 @@ func Constant2[B, C, A any](a A) func(B, C) A {
//
// value := 42
// IsNil(&value) // false
//
//go:inline
func IsNil[A any](a *A) bool {
return a == nil
}
@@ -149,6 +160,8 @@ func IsNil[A any](a *A) bool {
//
// value := 42
// IsNonNil(&value) // true
//
//go:inline
func IsNonNil[A any](a *A) bool {
return a != nil
}
@@ -207,6 +220,8 @@ func Swap[T1, T2, R any](f func(T1, T2) R) func(T2, T1) R {
//
// result := First(42, "hello") // 42
// result := First(true, 100) // true
//
//go:inline
func First[T1, T2 any](t1 T1, _ T2) T1 {
return t1
}
@@ -231,6 +246,14 @@ func First[T1, T2 any](t1 T1, _ T2) T1 {
//
// result := Second(42, "hello") // "hello"
// result := Second(true, 100) // 100
//
//go:inline
func Second[T1, T2 any](_ T1, t2 T2) T2 {
return t2
}
// Zero returns the zero value of the given type.
func Zero[A comparable]() A {
var zero A
return zero
}


@@ -117,9 +117,13 @@ func Nullary2[F1 ~func() T1, F2 ~func(T1) T2, T1, T2 any](f1 F1, f2 F2) func() T
// Curry2 takes a function with 2 parameters and returns a cascade of functions each taking only one parameter.
// The inverse function is [Uncurry2]
//go:inline
func Curry2[FCT ~func(T0, T1) T2, T0, T1, T2 any](f FCT) func(T0) func(T1) T2 {
//go:inline
return func(t0 T0) func(t1 T1) T2 {
//go:inline
return func(t1 T1) T2 {
//go:inline
return f(t0, t1)
}
}


@@ -36,7 +36,7 @@ package function
// Example:
//
// isPositive := func(n int) bool { return n > 0 }
// double := func(n int) int { return n * 2 }
// double := N.Mul(2)
// negate := func(n int) int { return -n }
//
// transform := Ternary(isPositive, double, negate)
@@ -51,7 +51,7 @@ package function
// )
// result := classify(5) // "positive"
// result2 := classify(-3) // "non-positive"
func Ternary[A, B any](pred func(A) bool, onTrue func(A) B, onFalse func(A) B) func(A) B {
func Ternary[A, B any](pred func(A) bool, onTrue, onFalse func(A) B) func(A) B {
return func(a A) B {
if pred(a) {
return onTrue(a)


@@ -246,7 +246,7 @@ func (builder *Builder) GetTargetURL() Result[string] {
parseQuery,
result.Map(F.Flow2(
F.Curry2(FM.ValuesMonoid.Concat)(builder.GetQuery()),
(url.Values).Encode,
url.Values.Encode,
)),
),
),
@@ -351,13 +351,13 @@ func Header(name string) Lens[*Builder, Option[string]] {
LZ.Map(delHeader(name)),
)
return L.MakeLens(get, func(b *Builder, value Option[string]) *Builder {
return L.MakeLensWithName(get, func(b *Builder, value Option[string]) *Builder {
cpy := b.clone()
return F.Pipe1(
value,
O.Fold(del(cpy), set(cpy)),
)
})
}, fmt.Sprintf("HttpHeader[%s]", name))
}
// WithHeader creates a [Endomorphism] for a certain header


@@ -16,6 +16,18 @@
/*
Package identity implements the Identity monad, the simplest possible monad.
# Fantasy Land Specification
This implementation corresponds to the Fantasy Land Identity type:
https://github.com/fantasyland/fantasy-land
Implemented Fantasy Land algebras:
- Functor: https://github.com/fantasyland/fantasy-land#functor
- Apply: https://github.com/fantasyland/fantasy-land#apply
- Applicative: https://github.com/fantasyland/fantasy-land#applicative
- Chain: https://github.com/fantasyland/fantasy-land#chain
- Monad: https://github.com/fantasyland/fantasy-land#monad
# Overview
The Identity monad is a trivial monad that simply wraps a value without adding
@@ -107,8 +119,8 @@ Chain for sequential composition:
// Chain multiple operations
result := F.Pipe2(
10,
identity.Chain(func(n int) int { return n * 2 }),
identity.Chain(func(n int) int { return n + 5 }),
identity.Chain(N.Mul(2)),
identity.Chain(N.Add(5)),
)
// result is 25
@@ -177,8 +189,8 @@ Convert tuples of Identity values:
// Traverse with transformation
tuple := T.MakeTuple2(1, 2)
result := identity.TraverseTuple2(
func(n int) int { return n * 2 },
func(n int) int { return n * 3 },
N.Mul(2),
N.Mul(3),
)(tuple)
// result is T.Tuple2[int, int]{2, 6}
@@ -211,7 +223,7 @@ Example of generic code:
) M {
return F.Pipe2(
monad.Of(value),
monad.Map(func(n int) int { return n * 2 }),
monad.Map(N.Mul(2)),
monad.Map(func(n int) string { return fmt.Sprintf("%d", n) }),
)
}


@@ -17,10 +17,13 @@ package identity
import (
"fmt"
"strconv"
"testing"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/utils"
N "github.com/IBM/fp-go/v2/number"
S "github.com/IBM/fp-go/v2/string"
T "github.com/IBM/fp-go/v2/tuple"
"github.com/stretchr/testify/assert"
)
@@ -51,17 +54,15 @@ func TestMap(t *testing.T) {
})
t.Run("transforms string", func(t *testing.T) {
result := F.Pipe1("hello", Map(func(s string) int {
return len(s)
}))
result := F.Pipe1("hello", Map(S.Size))
assert.Equal(t, 5, result)
})
t.Run("chains multiple maps", func(t *testing.T) {
result := F.Pipe2(
5,
Map(func(n int) int { return n * 2 }),
Map(func(n int) int { return n + 3 }),
Map(N.Mul(2)),
Map(N.Add(3)),
)
assert.Equal(t, 13, result)
})
@@ -69,14 +70,12 @@ func TestMap(t *testing.T) {
func TestMonadMap(t *testing.T) {
t.Run("transforms value", func(t *testing.T) {
result := MonadMap(10, func(n int) int { return n * 3 })
result := MonadMap(10, N.Mul(3))
assert.Equal(t, 30, result)
})
t.Run("changes type", func(t *testing.T) {
result := MonadMap(42, func(n int) string {
return fmt.Sprintf("Number: %d", n)
})
result := MonadMap(42, S.Format[int]("Number: %d"))
assert.Equal(t, "Number: 42", result)
})
}
@@ -109,23 +108,21 @@ func TestChain(t *testing.T) {
t.Run("chains multiple operations", func(t *testing.T) {
result := F.Pipe2(
10,
Chain(func(n int) int { return n * 2 }),
Chain(func(n int) int { return n + 5 }),
Chain(N.Mul(2)),
Chain(N.Add(5)),
)
assert.Equal(t, 25, result)
})
t.Run("changes type", func(t *testing.T) {
result := F.Pipe1(5, Chain(func(n int) string {
return fmt.Sprintf("Value: %d", n)
}))
result := F.Pipe1(5, Chain(S.Format[int]("Value: %d")))
assert.Equal(t, "Value: 5", result)
})
}
func TestMonadChain(t *testing.T) {
t.Run("chains computation", func(t *testing.T) {
result := MonadChain(7, func(n int) int { return n * 7 })
result := MonadChain(7, N.Mul(7))
assert.Equal(t, 49, result)
})
}
@@ -148,7 +145,7 @@ func TestChainFirst(t *testing.T) {
result := F.Pipe2(
10,
ChainFirst(func(n int) string { return "ignored" }),
Map(func(n int) int { return n * 2 }),
Map(N.Mul(2)),
)
assert.Equal(t, 20, result)
})
@@ -156,9 +153,7 @@ func TestChainFirst(t *testing.T) {
func TestMonadChainFirst(t *testing.T) {
t.Run("keeps original value", func(t *testing.T) {
result := MonadChainFirst(100, func(n int) string {
return fmt.Sprintf("%d", n)
})
result := MonadChainFirst(100, strconv.Itoa)
assert.Equal(t, 100, result)
})
}
@@ -170,17 +165,13 @@ func TestAp(t *testing.T) {
})
t.Run("applies curried function", func(t *testing.T) {
add := func(a int) func(int) int {
return func(b int) int { return a + b }
}
add := N.Add[int]
result := F.Pipe1(add(10), Ap[int](5))
assert.Equal(t, 15, result)
})
t.Run("changes type", func(t *testing.T) {
toString := func(n int) string {
return fmt.Sprintf("Number: %d", n)
}
toString := S.Format[int]("Number: %d")
result := F.Pipe1(toString, Ap[string](42))
assert.Equal(t, "Number: 42", result)
})
@@ -188,22 +179,22 @@ func TestAp(t *testing.T) {
func TestMonadAp(t *testing.T) {
t.Run("applies function to value", func(t *testing.T) {
result := MonadAp(func(n int) int { return n * 3 }, 7)
result := MonadAp(N.Mul(3), 7)
assert.Equal(t, 21, result)
})
}
func TestFlap(t *testing.T) {
t.Run("flips application", func(t *testing.T) {
double := func(n int) int { return n * 2 }
double := N.Mul(2)
result := F.Pipe1(double, Flap[int](5))
assert.Equal(t, 10, result)
})
t.Run("with multiple functions", func(t *testing.T) {
funcs := []func(int) int{
func(n int) int { return n * 2 },
func(n int) int { return n + 10 },
N.Mul(2),
N.Add(10),
func(n int) int { return n * n },
}
@@ -218,9 +209,7 @@ func TestFlap(t *testing.T) {
func TestMonadFlap(t *testing.T) {
t.Run("applies value to function", func(t *testing.T) {
result := MonadFlap(func(n int) string {
return fmt.Sprintf("Value: %d", n)
}, 42)
result := MonadFlap(S.Format[int]("Value: %d"), 42)
assert.Equal(t, "Value: 42", result)
})
}
@@ -391,8 +380,8 @@ func TestTraverseTuple(t *testing.T) {
t.Run("TraverseTuple2", func(t *testing.T) {
tuple := T.MakeTuple2(1, 2)
result := TraverseTuple2(
func(n int) int { return n * 2 },
func(n int) int { return n * 3 },
N.Mul(2),
N.Mul(3),
)(tuple)
assert.Equal(t, T.MakeTuple2(2, 6), result)
})
@@ -400,7 +389,7 @@ func TestTraverseTuple(t *testing.T) {
t.Run("TraverseTuple3", func(t *testing.T) {
tuple := T.MakeTuple3(1, 2, 3)
result := TraverseTuple3(
func(n int) int { return n + 10 },
N.Add(10),
func(n int) int { return n + 20 },
func(n int) int { return n + 30 },
)(tuple)
@@ -426,15 +415,11 @@ func TestMonad(t *testing.T) {
assert.Equal(t, 42, value)
// Test Map
mapped := m.Map(func(n int) string {
return fmt.Sprintf("Number: %d", n)
})(value)
mapped := m.Map(S.Format[int]("Number: %d"))(value)
assert.Equal(t, "Number: 42", mapped)
// Test Chain
chained := m.Chain(func(n int) string {
return fmt.Sprintf("Value: %d", n)
})(value)
chained := m.Chain(S.Format[int]("Value: %d"))(value)
assert.Equal(t, "Value: 42", chained)
// Test Ap
@@ -450,7 +435,7 @@ func TestMonadLaws(t *testing.T) {
t.Run("left identity", func(t *testing.T) {
// Of(a).Chain(f) === f(a)
a := 42
f := func(n int) int { return n * 2 }
f := N.Mul(2)
left := F.Pipe1(Of(a), Chain(f))
right := f(a)
@@ -470,8 +455,8 @@ func TestMonadLaws(t *testing.T) {
t.Run("associativity", func(t *testing.T) {
// m.Chain(f).Chain(g) === m.Chain(x => f(x).Chain(g))
m := 5
f := func(n int) int { return n * 2 }
g := func(n int) int { return n + 10 }
f := N.Mul(2)
g := N.Add(10)
left := F.Pipe2(m, Chain(f), Chain(g))
right := F.Pipe1(m, Chain(func(x int) int {
@@ -496,8 +481,8 @@ func TestFunctorLaws(t *testing.T) {
t.Run("composition", func(t *testing.T) {
// Map(f).Map(g) === Map(g ∘ f)
value := 5
f := func(n int) int { return n * 2 }
g := func(n int) int { return n + 10 }
f := N.Mul(2)
g := N.Add(10)
left := F.Pipe2(value, Map(f), Map(g))
right := F.Pipe1(value, Map(F.Flow2(f, g)))
@@ -541,7 +526,7 @@ func TestTraverseTuple4(t *testing.T) {
t.Run("traverses tuple4", func(t *testing.T) {
tuple := T.MakeTuple4(1, 2, 3, 4)
result := TraverseTuple4(
func(n int) int { return n + 10 },
N.Add(10),
func(n int) int { return n + 20 },
func(n int) int { return n + 30 },
func(n int) int { return n + 40 },
@@ -570,8 +555,8 @@ func TestTraverseTuple5(t *testing.T) {
tuple := T.MakeTuple5(1, 2, 3, 4, 5)
result := TraverseTuple5(
func(n int) int { return n * 1 },
func(n int) int { return n * 2 },
func(n int) int { return n * 3 },
N.Mul(2),
N.Mul(3),
func(n int) int { return n * 4 },
func(n int) int { return n * 5 },
)(tuple)
@@ -598,11 +583,11 @@ func TestTraverseTuple6(t *testing.T) {
t.Run("traverses tuple6", func(t *testing.T) {
tuple := T.MakeTuple6(1, 2, 3, 4, 5, 6)
result := TraverseTuple6(
func(n int) int { return n + 1 },
N.Add(1),
func(n int) int { return n + 2 },
func(n int) int { return n + 3 },
N.Add(3),
func(n int) int { return n + 4 },
func(n int) int { return n + 5 },
N.Add(5),
func(n int) int { return n + 6 },
)(tuple)
assert.Equal(t, T.MakeTuple6(2, 4, 6, 8, 10, 12), result)
@@ -691,15 +676,15 @@ func TestTraverseTuple9(t *testing.T) {
t.Run("traverses tuple9", func(t *testing.T) {
tuple := T.MakeTuple9(1, 2, 3, 4, 5, 6, 7, 8, 9)
result := TraverseTuple9(
func(n int) int { return n + 1 },
func(n int) int { return n + 1 },
func(n int) int { return n + 1 },
func(n int) int { return n + 1 },
func(n int) int { return n + 1 },
func(n int) int { return n + 1 },
func(n int) int { return n + 1 },
func(n int) int { return n + 1 },
func(n int) int { return n + 1 },
N.Add(1),
N.Add(1),
N.Add(1),
N.Add(1),
N.Add(1),
N.Add(1),
N.Add(1),
N.Add(1),
N.Add(1),
)(tuple)
assert.Equal(t, T.MakeTuple9(2, 3, 4, 5, 6, 7, 8, 9, 10), result)
})
@@ -724,16 +709,16 @@ func TestTraverseTuple10(t *testing.T) {
t.Run("traverses tuple10", func(t *testing.T) {
tuple := T.MakeTuple10(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
result := TraverseTuple10(
func(n int) int { return n * 2 },
func(n int) int { return n * 2 },
func(n int) int { return n * 2 },
func(n int) int { return n * 2 },
func(n int) int { return n * 2 },
func(n int) int { return n * 2 },
func(n int) int { return n * 2 },
func(n int) int { return n * 2 },
func(n int) int { return n * 2 },
func(n int) int { return n * 2 },
N.Mul(2),
N.Mul(2),
N.Mul(2),
N.Mul(2),
N.Mul(2),
N.Mul(2),
N.Mul(2),
N.Mul(2),
N.Mul(2),
N.Mul(2),
)(tuple)
assert.Equal(t, T.MakeTuple10(2, 4, 6, 8, 10, 12, 14, 16, 18, 20), result)
})


@@ -0,0 +1,226 @@
# Idiomatic Package Review Summary
**Date:** 2025-11-26
**Reviewer:** Code Review Assistant
## Overview
This document summarizes the comprehensive review of the `idiomatic` package and its subpackages, including documentation fixes, additions, and test coverage analysis.
## Documentation Improvements
### 1. Main Package (`idiomatic/`)
- ✅ **Status:** Documentation is comprehensive and well-structured
- **File:** `doc.go` (505 lines)
- **Quality:** Excellent - includes overview, performance comparisons, usage examples, and best practices
### 2. Option Package (`idiomatic/option/`)
- ✅ **Fixed:** Added missing copyright headers to `types.go` and `function.go`
- ✅ **Fixed:** Added comprehensive documentation for type aliases in `types.go`
- ✅ **Fixed:** Enhanced function documentation in `function.go` with examples
- ✅ **Fixed:** Added missing documentation for `FromZero`, `FromNonZero`, and `FromEq` functions
- **Files Updated:**
- `types.go` - Added copyright header and type documentation
- `function.go` - Added copyright header and improved function docs
- `option.go` - Enhanced documentation for utility functions
### 3. Result Package (`idiomatic/result/`)
- ✅ **Fixed:** Added missing copyright header to `function.go`
- ✅ **Fixed:** Enhanced function documentation with examples
- **Files Updated:**
- `function.go` - Added copyright header and improved documentation
- `types.go` - Already had good documentation
### 4. IOResult Package (`idiomatic/ioresult/`)
- ✅ **Status:** Documentation is comprehensive
- **File:** `doc.go` (198 lines)
- **Quality:** Excellent - includes detailed explanations of IO operations, lazy evaluation, and side effects
### 5. ReaderIOResult Package (`idiomatic/readerioresult/`)
- ✅ **Created:** New `doc.go` file (96 lines)
- ✅ **Fixed:** Added comprehensive type documentation to `types.go`
- **New Documentation Includes:**
- Package overview and use cases
- Basic usage examples
- Composition patterns
- Error handling strategies
- Relationship to other monads
### 6. ReaderResult Package (`idiomatic/readerresult/`)
- ✅ **Fixed:** Added comprehensive type documentation to `types.go`
- **Existing:** `doc.go` already present (178 lines) with excellent documentation
## Test Coverage Analysis
### Option Package Tests
**File:** `idiomatic/option/option_test.go`
**Existing Coverage:**
- ✅ `IsNone` - Tested
- ✅ `IsSome` - Tested
- ✅ `Map` - Tested
- ✅ `Ap` - Tested
- ✅ `Chain` - Tested
- ✅ `ChainTo` - Comprehensive tests with multiple scenarios
**Missing Tests (Commented Out):**
- ⚠️ `Flatten` - Test commented out
- ⚠️ `Fold` - Test commented out
- ⚠️ `FromPredicate` - Test commented out
- ⚠️ `Alt` - Test commented out
**Recommendations:**
1. Uncomment and fix the commented-out tests
2. Add tests for:
- `FromZero`
- `FromNonZero`
- `FromEq`
- `FromNillable`
- `MapTo`
- `GetOrElse`
- `ChainFirst`
- `Reduce`
- `Filter`
- `Flap`
- `ToString`
### Result Package Tests
**File:** `idiomatic/result/either_test.go`
**Existing Coverage:**
- ✅ `IsLeft` - Tested
- ✅ `IsRight` - Tested
- ✅ `Map` - Tested
- ✅ `Ap` - Tested
- ✅ `Alt` - Tested
- ✅ `ChainFirst` - Tested
- ✅ `ChainOptionK` - Tested
- ✅ `FromOption` - Tested
- ✅ `ToString` - Tested
**Missing Tests:**
- ⚠️ `Of` - Not explicitly tested
- ⚠️ `BiMap` - Not tested
- ⚠️ `MapTo` - Not tested
- ⚠️ `MapLeft` - Not tested
- ⚠️ `Chain` - Not tested
- ⚠️ `ChainTo` - Not tested
- ⚠️ `ToOption` - Not tested
- ⚠️ `FromError` - Not tested
- ⚠️ `ToError` - Not tested
- ⚠️ `Fold` - Not tested
- ⚠️ `FromPredicate` - Not tested
- ⚠️ `FromNillable` - Not tested
- ⚠️ `GetOrElse` - Not tested
- ⚠️ `Reduce` - Not tested
- ⚠️ `OrElse` - Not tested
- ⚠️ `ToType` - Not tested
- ⚠️ `Memoize` - Not tested
- ⚠️ `Flap` - Not tested
### IOResult Package Tests
**File:** `idiomatic/ioresult/monad_test.go`
**Existing Coverage:** ✅ **EXCELLENT**
- ✅ Comprehensive monad law tests (left identity, right identity, associativity)
- ✅ Functor law tests (composition, identity)
- ✅ Pointed, Functor, and Monad interface tests
- ✅ Parallel vs Sequential execution tests
- ✅ Integration tests with complex pipelines
- ✅ Error handling scenarios
**Status:** This package has exemplary test coverage and can serve as a model for other packages.
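As a concrete reference for what such monad-law tests look like in this code base, here is a left-identity check modeled on the identity package's law tests included in this change set (a sketch only: the identity import path is assumed; `Of`, `Chain`, `F.Pipe1`, and `N.Mul` all appear in the diffs above):
```go
package identity_test

import (
	"testing"

	F "github.com/IBM/fp-go/v2/function"
	I "github.com/IBM/fp-go/v2/identity" // import path assumed
	N "github.com/IBM/fp-go/v2/number"
	"github.com/stretchr/testify/assert"
)

// Left identity: Of(a) chained with f must equal f(a).
func TestLeftIdentitySketch(t *testing.T) {
	a := 42
	f := N.Mul(2)
	left := F.Pipe1(I.Of(a), I.Chain(f))
	assert.Equal(t, f(a), left)
}
```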
### ReaderIOResult Package
**Status:** ⚠️ **NO TESTS FOUND**
**Recommendations:**
Create a comprehensive test suite covering the following (a skeleton sketch follows this list):
- Basic construction and execution
- Map, Chain, Ap operations
- Error handling
- Environment dependency injection
- Integration with IOResult
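One way to stake out that suite before the individual tests are written is a skeleton of named subtests (a sketch only; the package name and test names are placeholders, and each subtest still needs to be implemented against the package's actual API):
```go
package readerioresult_test

import "testing"

// Skeleton for the recommended test suite; every subtest is still to be written.
func TestReaderIOResultSuite(t *testing.T) {
	for _, name := range []string{
		"basic construction and execution",
		"Map, Chain and Ap operations",
		"error handling",
		"environment dependency injection",
		"integration with IOResult",
	} {
		t.Run(name, func(t *testing.T) {
			t.Skip("TODO: implement against the readerioresult API")
		})
	}
}
```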
### ReaderResult Package
**Files:** Multiple test files exist
- `array_test.go`
- `bind_test.go`
- `curry_test.go`
- `from_test.go`
- `monoid_test.go`
- `reader_test.go`
- `sequence_test.go`
**Status:** ✅ Good coverage exists
## Subpackages Review
### Packages Requiring Review:
1. **idiomatic/option/number/** - Needs documentation and test review
2. **idiomatic/option/testing/** - Contains disabled test files (`laws_test._go`, `laws._go`)
3. **idiomatic/result/exec/** - Needs review
4. **idiomatic/result/http/** - Needs review
5. **idiomatic/result/testing/** - Contains disabled test files
6. **idiomatic/ioresult/exec/** - Needs review
7. **idiomatic/ioresult/file/** - Needs review
8. **idiomatic/ioresult/http/** - Needs review
9. **idiomatic/ioresult/http/builder/** - Needs review
10. **idiomatic/ioresult/testing/** - Needs review
## Priority Recommendations
### High Priority
1. **Enable Commented Tests:** Uncomment and fix tests in `option/option_test.go`
2. **Add Missing Option Tests:** Create tests for all untested functions in option package
3. **Add Missing Result Tests:** Create comprehensive test suite for result package
4. **Create ReaderIOResult Tests:** This package has no tests at all
### Medium Priority
5. **Review Subpackages:** Systematically review exec, file, http, and testing subpackages
6. **Enable Testing Package Tests:** Investigate why `laws_test._go` files are disabled
### Low Priority
7. **Benchmark Tests:** Consider adding benchmark tests for performance-critical operations
8. **Property-Based Tests:** Consider adding property-based tests using testing/quick
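A minimal sketch of what such a property-based test could look like with `testing/quick` (the package name is hypothetical; the property reuses `strings.EqualFold`, which already appears in the Eq tests of this change set):
```go
package option_test

import (
	"strings"
	"testing"
	"testing/quick"
)

// Property: case-insensitive string equality is symmetric for arbitrary inputs.
func TestEqualFoldIsSymmetric(t *testing.T) {
	symmetric := func(a, b string) bool {
		return strings.EqualFold(a, b) == strings.EqualFold(b, a)
	}
	if err := quick.Check(symmetric, nil); err != nil {
		t.Error(err)
	}
}
```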
## Files Modified in This Review
1. `idiomatic/option/types.go` - Added copyright and documentation
2. `idiomatic/option/function.go` - Added copyright and enhanced docs
3. `idiomatic/option/option.go` - Enhanced function documentation
4. `idiomatic/result/function.go` - Added copyright and enhanced docs
5. `idiomatic/readerioresult/doc.go` - **CREATED NEW FILE**
6. `idiomatic/readerioresult/types.go` - Added comprehensive type docs
7. `idiomatic/readerresult/types.go` - Added comprehensive type docs
## Summary Statistics
- **Packages Reviewed:** 6 main packages
- **Documentation Files Created:** 1 (readerioresult/doc.go)
- **Files Modified:** 7
- **Lines of Documentation Added:** ~150+
- **Test Coverage Status:**
- ✅ Excellent: ioresult
- ✅ Good: readerresult
- ⚠️ Needs Improvement: option, result
- ⚠️ Missing: readerioresult
## Next Steps
1. Create missing unit tests for option package functions
2. Create missing unit tests for result package functions
3. Create complete test suite for readerioresult package
4. Review and document subpackages (exec, file, http, testing, number)
5. Investigate and potentially enable disabled test files in testing subpackages
6. Consider adding integration tests that demonstrate real-world usage patterns
## Conclusion
The idiomatic package has excellent documentation at the package level, with comprehensive explanations of concepts, usage patterns, and performance characteristics. The main areas for improvement are:
1. **Test Coverage:** Several functions lack unit tests, particularly in option and result packages
2. **Subpackage Documentation:** Some subpackages need documentation review
3. **Disabled Tests:** Some test files are disabled and should be investigated
The IOResult package serves as an excellent example of comprehensive testing, including monad law verification and integration tests. This approach should be replicated across other packages.

File diff suppressed because it is too large


@@ -0,0 +1,75 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package readerresult
import (
RR "github.com/IBM/fp-go/v2/idiomatic/readerresult"
)
// TraverseArray applies a ReaderResult-returning function to each element of an array,
// collecting the results. If any element fails, the entire operation fails with the first error.
//
// Example:
//
// parseUser := func(id int) readerresult.ReaderResult[User] { ... }
// ids := []int{1, 2, 3}
// result := readerresult.TraverseArray(parseUser)(ids)
// // result(env) returns ([]User, nil) with all users or (nil, error) on the first error
//
//go:inline
func TraverseArray[A, B any](f Kleisli[A, B]) Kleisli[[]A, []B] {
return RR.TraverseArray(f)
}
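// MonadTraverseArray is the uncurried form of [TraverseArray]: it takes the array and the
// ReaderResult-returning function directly and collects the results into a single ReaderResult.
// As with [TraverseArray], if any element fails the whole operation fails with the first error.
//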
//go:inline
func MonadTraverseArray[A, B any](as []A, f Kleisli[A, B]) ReaderResult[[]B] {
return RR.MonadTraverseArray(as, f)
}
// TraverseArrayWithIndex is like TraverseArray but the function also receives the element's index.
// This is useful when the transformation depends on the position in the array.
//
// Example:
//
// processItem := func(idx int, item string) readerresult.ReaderResult[int] {
// return readerresult.Of(idx + len(item))
// }
// items := []string{"a", "bb", "ccc"}
// result := readerresult.TraverseArrayWithIndex(processItem)(items)
//
//go:inline
func TraverseArrayWithIndex[A, B any](f func(int, A) ReaderResult[B]) Kleisli[[]A, []B] {
return RR.TraverseArrayWithIndex(f)
}
// SequenceArray converts an array of ReaderResult values into a single ReaderResult of an array.
// If any element fails, the entire operation fails with the first error encountered.
// All computations share the same environment.
//
// Example:
//
// readers := []readerresult.ReaderResult[int]{
// readerresult.Of(1),
// readerresult.Of(2),
// readerresult.Of(3),
// }
// result := readerresult.SequenceArray(readers)
// // result(env) returns ([]int{1, 2, 3}, nil)
//
//go:inline
func SequenceArray[A any](ma []ReaderResult[A]) ReaderResult[[]A] {
return RR.SequenceArray(ma)
}

Some files were not shown because too many files have changed in this diff