mirror of https://github.com/IBM/fp-go.git synced 2025-12-09 23:11:40 +02:00

Compare commits


12 Commits

| SHA | Message | Author | Date |
|---|---|---|---|
| b70e481e7d | fix: some minor improvements | Dr. Carsten Leue | 2025-11-13 12:56:51 +01:00 |
| 3c3bb7c166 | fix: improve lens implementation | Dr. Carsten Leue | 2025-11-13 12:15:52 +01:00 |
| d3007cbbfa | fix: improve lens generator | Dr. Carsten Leue | 2025-11-13 09:39:18 +01:00 |
| 5aa0e1ea2e | fix: handle non comparable types | Dr. Carsten Leue | 2025-11-13 09:35:56 +01:00 |
| d586428cb0 | fix: examples | Dr. Carsten Leue | 2025-11-13 09:05:57 +01:00 |
| d2dbce6e8b | fix: improve lens handling | Dr. Carsten Leue | 2025-11-12 18:23:57 +01:00 |
| 6f7ec0768d | fix: improve lens generation | Dr. Carsten Leue | 2025-11-12 17:28:20 +01:00 |
| ca813b673c | fix: better tests and doc | Dr. Carsten Leue | 2025-11-12 16:24:12 +01:00 |
| af271e7d10 | fix: better endo and lens | Dr. Carsten Leue | 2025-11-12 15:03:55 +01:00 |
| 567315a31c | fix: make a distinction between Chain and Compose for endomorphism | Dr. Carsten Leue | 2025-11-12 13:51:00 +01:00 |
| 311ed55f06 | fix: add Read method to Readers | Dr. Carsten Leue | 2025-11-12 11:59:20 +01:00 |
| 23333ce52c | doc: improve doc | Dr. Carsten Leue | 2025-11-12 11:08:18 +01:00 |

All commits are signed off by Dr. Carsten Leue <carsten.leue@de.ibm.com>.
70 changed files with 9384 additions and 1160 deletions


@@ -2,25 +2,152 @@
[![Go Reference](https://pkg.go.dev/badge/github.com/IBM/fp-go/v2.svg)](https://pkg.go.dev/github.com/IBM/fp-go/v2)
[![Coverage Status](https://coveralls.io/repos/github/IBM/fp-go/badge.svg?branch=main&flag=v2)](https://coveralls.io/github/IBM/fp-go?branch=main)
[![Go Report Card](https://goreportcard.com/badge/github.com/IBM/fp-go/v2)](https://goreportcard.com/report/github.com/IBM/fp-go/v2)
Version 2 of fp-go leverages [generic type aliases](https://github.com/golang/go/issues/46477) introduced in Go 1.24, providing a more ergonomic and streamlined API.
**fp-go** is a comprehensive functional programming library for Go, bringing type-safe functional patterns inspired by [fp-ts](https://gcanti.github.io/fp-ts/) to the Go ecosystem. Version 2 leverages [generic type aliases](https://github.com/golang/go/issues/46477) introduced in Go 1.24, providing a more ergonomic and streamlined API.
## 📚 Table of Contents
- [Overview](#-overview)
- [Features](#-features)
- [Requirements](#-requirements)
- [Breaking Changes](#-breaking-changes)
- [Installation](#-installation)
- [Quick Start](#-quick-start)
- [Breaking Changes](#️-breaking-changes)
- [Key Improvements](#-key-improvements)
- [Migration Guide](#-migration-guide)
- [Installation](#-installation)
- [What's New](#-whats-new)
- [Documentation](#-documentation)
- [Contributing](#-contributing)
- [License](#-license)
## 🎯 Overview
fp-go brings the power of functional programming to Go with:
- **Type-safe abstractions** - Monads, Functors, Applicatives, and more
- **Composable operations** - Build complex logic from simple, reusable functions
- **Error handling** - Elegant error management with `Either`, `Result`, and `IOEither`
- **Lazy evaluation** - Control when and how computations execute
- **Optics** - Powerful lens, prism, and traversal operations for immutable data manipulation (see the sketch below)
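As a quick taste of the optics support, here is a minimal lens sketch. It uses only `L.MakeLens` from `optics/lens` with the getter/setter shape that the generated lens code in this changeset relies on; the curried `Set` accessor shown below is an assumption carried over from earlier releases, not an authoritative statement of the V2 API.
```go
package main

import (
	"fmt"

	L "github.com/IBM/fp-go/v2/optics/lens"
)

type Person struct {
	Name string
	Age  int
}

func main() {
	// A lens focusing on the Name field of a Person
	nameLens := L.MakeLens(
		func(p Person) string { return p.Name },
		func(p Person, name string) Person { p.Name = name; return p },
	)

	p := Person{Name: "Ada", Age: 36}
	fmt.Println(nameLens.Get(p)) // Ada

	// Set returns an updated copy; p itself is left untouched
	// (assumes Lens exposes Get and a curried Set, as in earlier releases).
	updated := nameLens.Set("Grace")(p)
	fmt.Println(updated.Name, p.Name) // Grace Ada
}
```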
## ✨ Features
- 🔒 **Type Safety** - Leverage Go's generics for compile-time guarantees
- 🧩 **Composability** - Chain operations naturally with functional composition
- 📦 **Rich Type System** - `Option`, `Either`, `Result`, `IO`, `Reader`, and more
- 🎯 **Practical** - Designed for real-world Go applications
- 🚀 **Performance** - Zero-cost abstractions where possible
- 📖 **Well-documented** - Comprehensive API documentation and examples
- 🧪 **Battle-tested** - Extensive test coverage
## 🔧 Requirements
- **Go 1.24 or later** (for generic type alias support)
## ⚠️ Breaking Changes
## 📦 Installation
### 1. Generic Type Aliases
```bash
go get github.com/IBM/fp-go/v2
```
## 🚀 Quick Start
### Working with Option
```go
package main

import (
	"fmt"

	"github.com/IBM/fp-go/v2/option"
)

func main() {
	// Create an Option
	some := option.Some(42)
	none := option.None[int]()
	fmt.Println(option.GetOrElse(0)(none)) // Output: 0

	// Map over values
	doubled := option.Map(func(x int) int { return x * 2 })(some)
	fmt.Println(option.GetOrElse(0)(doubled)) // Output: 84

	// Chain operations
	result := option.Chain(func(x int) option.Option[string] {
		if x > 0 {
			return option.Some(fmt.Sprintf("Positive: %d", x))
		}
		return option.None[string]()
	})(some)
	fmt.Println(option.GetOrElse("No value")(result)) // Output: Positive: 42
}
```
### Error Handling with Result
```go
package main

import (
	"errors"
	"fmt"

	"github.com/IBM/fp-go/v2/result"
)

func divide(a, b int) result.Result[int] {
	if b == 0 {
		return result.Error[int](errors.New("division by zero"))
	}
	return result.Ok(a / b)
}

func main() {
	res := divide(10, 2)

	// Pattern match on the result; both branches return the same type
	msg := result.Fold(
		func(err error) string { return fmt.Sprintf("Error: %s", err) },
		func(val int) string { return fmt.Sprintf("Result: %d", val) },
	)(res)
	fmt.Println(msg) // Output: Result: 5

	// Or use GetOrElse for a default value
	value := result.GetOrElse(0)(divide(10, 0))
	fmt.Println("Value:", value) // Output: Value: 0
}
```
### Composing IO Operations
```go
package main

import (
	"fmt"

	"github.com/IBM/fp-go/v2/io"
)

func main() {
	// Define pure IO operations
	readInput := io.MakeIO(func() string {
		return "Hello, fp-go!"
	})

	// Transform the result
	decorated := io.Map(func(s string) string {
		return fmt.Sprintf(">>> %s <<<", s)
	})(readInput)

	// Execute the IO operation
	result := decorated()
	fmt.Println(result) // Output: >>> Hello, fp-go! <<<
}
```
### From V1 to V2
#### 1. Generic Type Aliases
V2 uses [generic type aliases](https://github.com/golang/go/issues/46477) which require Go 1.24+. This is the most significant change and enables cleaner type definitions.
@@ -34,7 +161,7 @@ type ReaderIOEither[R, E, A any] RD.Reader[R, IOE.IOEither[E, A]]
type ReaderIOEither[R, E, A any] = RD.Reader[R, IOE.IOEither[E, A]]
```
### 2. Generic Type Parameter Ordering
#### 2. Generic Type Parameter Ordering
Type parameters that **cannot** be inferred from function arguments now come first, improving type inference.
@@ -52,7 +179,7 @@ func Ap[B, R, E, A any](fa ReaderIOEither[R, E, A]) func(ReaderIOEither[R, E, fu
This change allows the Go compiler to infer more types automatically, reducing the need for explicit type parameters.
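As an illustration (reusing only constructors and combinators that appear in the Quick Start above), the explicit type argument is needed exactly where it cannot be read off the function arguments:
```go
package main

import (
	"errors"
	"fmt"

	"github.com/IBM/fp-go/v2/option"
	"github.com/IBM/fp-go/v2/result"
)

func main() {
	// The non-inferable type parameter comes first and is the only one spelled out.
	none := option.None[int]()                      // int cannot be inferred from an empty argument list
	failed := result.Error[int](errors.New("boom")) // the success type int is explicit, the error type is inferred

	// Fully inferable calls need no explicit type arguments at all.
	doubled := option.Map(func(x int) int { return x * 2 })(option.Some(21))

	fmt.Println(option.GetOrElse(0)(none))    // 0
	fmt.Println(result.GetOrElse(0)(failed))  // 0
	fmt.Println(option.GetOrElse(0)(doubled)) // 42
}
```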
### 3. Pair Monad Semantics
#### 3. Pair Monad Semantics
Monadic operations for `Pair` now operate on the **second argument** to align with the [Haskell definition](https://hackage.haskell.org/package/TypeCompose-0.9.14/docs/Data-Pair.html).
@@ -70,6 +197,36 @@ pair := MakePair(1, "hello")
result := Map(func(s string) string { return s + "!" })(pair) // Pair(1, "hello!")
```
#### 4. Endomorphism Compose Semantics
The `Compose` function for endomorphisms now follows **mathematical function composition** (right-to-left execution), aligning with standard functional programming conventions.
**V1:**
```go
// Compose executed left-to-right
double := func(x int) int { return x * 2 }
increment := func(x int) int { return x + 1 }
composed := Compose(double, increment)
result := composed(5) // (5 * 2) + 1 = 11
```
**V2:**
```go
// Compose executes RIGHT-TO-LEFT (mathematical composition)
double := func(x int) int { return x * 2 }
increment := func(x int) int { return x + 1 }
composed := Compose(double, increment)
result := composed(5) // (5 + 1) * 2 = 12
// Use MonadChain for LEFT-TO-RIGHT execution
chained := MonadChain(double, increment)
result2 := chained(5) // (5 * 2) + 1 = 11
```
**Key Difference:**
- `Compose(f, g)` now means `f ∘ g`, which applies `g` first, then `f` (right-to-left)
- `MonadChain(f, g)` applies `f` first, then `g` (left-to-right)
## ✨ Key Improvements
### 1. Simplified Type Declarations
@@ -91,16 +248,16 @@ func processData(input string) ET.Either[error, OPT.Option[int]] {
**V2 Approach:**
```go
import (
"github.com/IBM/fp-go/v2/either"
"github.com/IBM/fp-go/v2/result"
"github.com/IBM/fp-go/v2/option"
)
// Define type aliases once
type Either[A any] = either.Either[error, A]
type Result[A any] = result.Result[A]
type Option[A any] = option.Option[A]
// Use them throughout your codebase
func processData(input string) Either[Option[int]] {
func processData(input string) Result[Option[int]] {
// implementation
}
```
@@ -230,20 +387,14 @@ Create project-wide type aliases for common patterns:
package myapp
import (
"github.com/IBM/fp-go/v2/either"
"github.com/IBM/fp-go/v2/result"
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/ioeither"
"github.com/IBM/fp-go/v2/ioresult"
)
type Either[A any] = either.Either[error, A]
type Result[A any] = result.Result[A]
type Option[A any] = option.Option[A]
type IOEither[A any] = ioeither.IOEither[error, A]
```
## 📦 Installation
```bash
go get github.com/IBM/fp-go/v2
type IOResult[A any] = ioresult.IOResult[A]
```
## 🆕 What's New
@@ -277,25 +428,37 @@ func process() IOET.IOEither[error, string] {
**V2 Simplified Example:**
```go
import (
"github.com/IBM/fp-go/v2/either"
"github.com/IBM/fp-go/v2/ioeither"
"strconv"
"github.com/IBM/fp-go/v2/ioresult"
)
type IOEither[A any] = ioeither.IOEither[error, A]
type IOResult[A any] = ioresult.IOResult[A]
func process() IOEither[string] {
return ioeither.Map(
func process() IOResult[string] {
return ioresult.Map(
strconv.Itoa,
)(fetchData())
}
```
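To round the example off, here is a hedged sketch of actually running the `IOResult`. Since `fetchData` is elided from the diff, a stub built on `ioresult.Of` (an assumed constructor) stands in for it, and `result.Fold` is used as in the Quick Start:
```go
package main

import (
	"fmt"
	"strconv"

	"github.com/IBM/fp-go/v2/ioresult"
	"github.com/IBM/fp-go/v2/result"
)

type IOResult[A any] = ioresult.IOResult[A]

// fetchData is elided in the diff above; this stub (using ioresult.Of, an
// assumed constructor) stands in for it purely for illustration.
func fetchData() IOResult[int] {
	return ioresult.Of(42)
}

func process() IOResult[string] {
	return ioresult.Map(strconv.Itoa)(fetchData())
}

func main() {
	// Running the IO yields a Result[string]; fold it into a plain string.
	msg := result.Fold(
		func(err error) string { return "Error: " + err.Error() },
		func(s string) string { return "Value: " + s },
	)(process()())
	fmt.Println(msg) // Value: 42
}
```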
## 📚 Additional Resources
## 📚 Documentation
- [Main README](../README.md) - Core concepts and design philosophy
- [API Documentation](https://pkg.go.dev/github.com/IBM/fp-go/v2)
- [Code Samples](../samples/)
- [Go 1.24 Release Notes](https://tip.golang.org/doc/go1.24)
- **[API Documentation](https://pkg.go.dev/github.com/IBM/fp-go/v2)** - Complete API reference
- **[Code Samples](./samples/)** - Practical examples and use cases
- **[Go 1.24 Release Notes](https://tip.golang.org/doc/go1.24)** - Information about generic type aliases
### Core Modules
- **Option** - Represent optional values without nil
- **Either** - Type-safe error handling with left/right values
- **Result** - Simplified Either with error as left type
- **IO** - Lazy evaluation and side effect management
- **IOEither** - Combine IO with error handling
- **Reader** - Dependency injection pattern
- **ReaderIOEither** - Combine Reader, IO, and Either for complex workflows
- **Array** - Functional array operations (see the sketch after this list)
- **Record** - Functional record/map operations
- **Optics** - Lens, Prism, Optional, and Traversal for immutable updates
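As a small sketch of the array module: the curried `array.Map` call mirrors the one used in the bytes documentation of this changeset, while `array.Filter` is assumed to follow the same curried shape.
```go
package main

import (
	"fmt"
	"strconv"

	"github.com/IBM/fp-go/v2/array"
)

func main() {
	nums := []int{1, 2, 3, 4, 5}

	// Keep the even numbers, then render them as strings.
	// Both operations are curried: configure first, apply to the slice second.
	evens := array.Filter(func(x int) bool { return x%2 == 0 })(nums)
	labels := array.Map(strconv.Itoa)(evens)

	fmt.Println(labels) // [2 4]
}
```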
## 🤔 Should I Migrate?
@@ -310,10 +473,25 @@ func process() IOEither[string] {
- ⚠️ Migration effort outweighs benefits for your project
- ⚠️ You need stability in production (V2 is newer)
## 🤝 Contributing
Contributions are welcome! Here's how you can help:
1. **Report bugs** - Open an issue with a clear description and reproduction steps
2. **Suggest features** - Share your ideas for improvements
3. **Submit PRs** - Fix bugs or add features (please discuss major changes first)
4. **Improve docs** - Help make the documentation clearer and more comprehensive
Please read our contribution guidelines before submitting pull requests.
## 🐛 Issues and Feedback
Found a bug or have a suggestion? Please [open an issue](https://github.com/IBM/fp-go/issues) on GitHub.
## 📄 License
This project is licensed under the Apache License 2.0 - see the LICENSE file for details.
This project is licensed under the Apache License 2.0. See the [LICENSE](https://github.com/IBM/fp-go/blob/main/LICENSE) file for details.
---
**Made with ❤️ by IBM**


@@ -15,14 +15,163 @@
package bytes
// Empty returns an empty byte slice.
//
// This function returns the identity element for the byte slice Monoid,
// which is an empty byte slice. It's useful as a starting point for
// building byte slices or as a default value.
//
// Returns:
// - An empty byte slice ([]byte{})
//
// Properties:
// - Empty() is the identity element for Monoid.Concat
// - Monoid.Concat(Empty(), x) == x
// - Monoid.Concat(x, Empty()) == x
//
// Example - Basic usage:
//
// empty := Empty()
// fmt.Println(len(empty)) // 0
//
// Example - As identity element:
//
// data := []byte("hello")
// result1 := Monoid.Concat(Empty(), data) // []byte("hello")
// result2 := Monoid.Concat(data, Empty()) // []byte("hello")
//
// Example - Building byte slices:
//
// // Start with empty and build up
// buffer := Empty()
// buffer = Monoid.Concat(buffer, []byte("Hello"))
// buffer = Monoid.Concat(buffer, []byte(" "))
// buffer = Monoid.Concat(buffer, []byte("World"))
// // buffer: []byte("Hello World")
//
// See also:
// - Monoid.Empty(): Alternative way to get empty byte slice
// - ConcatAll(): For concatenating multiple byte slices
func Empty() []byte {
return Monoid.Empty()
}
// ToString converts a byte slice to a string.
//
// This function performs a direct conversion from []byte to string.
// The conversion creates a new string with a copy of the byte data.
//
// Parameters:
// - a: The byte slice to convert
//
// Returns:
// - A string containing the same data as the byte slice
//
// Performance Note:
//
// This conversion allocates a new string. For performance-critical code
// that needs to avoid allocations, consider using unsafe.String (Go 1.20+)
// or working directly with byte slices.
//
// Example - Basic conversion:
//
// bytes := []byte("hello")
// str := ToString(bytes)
// fmt.Println(str) // "hello"
//
// Example - Converting binary data:
//
// // ASCII codes for "Hello"
// data := []byte{0x48, 0x65, 0x6c, 0x6c, 0x6f}
// str := ToString(data)
// fmt.Println(str) // "Hello"
//
// Example - Empty byte slice:
//
// empty := Empty()
// str := ToString(empty)
// fmt.Println(str == "") // true
//
// Example - UTF-8 encoded text:
//
// utf8Bytes := []byte("Hello, 世界")
// str := ToString(utf8Bytes)
// fmt.Println(str) // "Hello, 世界"
//
// Example - Round-trip conversion:
//
// original := "test string"
// bytes := []byte(original)
// result := ToString(bytes)
// fmt.Println(original == result) // true
//
// See also:
// - []byte(string): For converting string to byte slice
// - Size(): For getting the length of a byte slice
func ToString(a []byte) string {
return string(a)
}
// Size returns the number of bytes in a byte slice.
//
// This function returns the length of the byte slice, which is the number
// of bytes it contains. This is equivalent to len(as) but provided as a
// named function for use in functional composition.
//
// Parameters:
// - as: The byte slice to measure
//
// Returns:
// - The number of bytes in the slice
//
// Example - Basic usage:
//
// data := []byte("hello")
// size := Size(data)
// fmt.Println(size) // 5
//
// Example - Empty slice:
//
// empty := Empty()
// size := Size(empty)
// fmt.Println(size) // 0
//
// Example - Binary data:
//
// binary := []byte{0x01, 0x02, 0x03, 0x04}
// size := Size(binary)
// fmt.Println(size) // 4
//
// Example - UTF-8 encoded text:
//
// // Note: Size returns byte count, not character count
// utf8 := []byte("Hello, 世界")
// byteCount := Size(utf8)
// fmt.Println(byteCount) // 13 (not 9 characters)
//
// Example - Using in functional composition:
//
// import "github.com/IBM/fp-go/v2/array"
//
// slices := [][]byte{
// []byte("a"),
// []byte("bb"),
// []byte("ccc"),
// }
//
// // Map to get sizes
// sizes := array.Map(Size)(slices)
// // sizes: []int{1, 2, 3}
//
// Example - Checking if slice is empty:
//
// data := []byte("test")
// isEmpty := Size(data) == 0
// fmt.Println(isEmpty) // false
//
// See also:
// - len(): Built-in function for getting slice length
// - ToString(): For converting byte slice to string
func Size(as []byte) int {
return len(as)
}


@@ -187,6 +187,299 @@ func TestOrd(t *testing.T) {
})
}
// TestOrdProperties tests mathematical properties of Ord
func TestOrdProperties(t *testing.T) {
t.Run("reflexivity: x == x", func(t *testing.T) {
testCases := [][]byte{
[]byte{},
[]byte("a"),
[]byte("test"),
[]byte{0x01, 0x02, 0x03},
}
for _, tc := range testCases {
assert.Equal(t, 0, Ord.Compare(tc, tc),
"Compare(%v, %v) should be 0", tc, tc)
assert.True(t, Ord.Equals(tc, tc),
"Equals(%v, %v) should be true", tc, tc)
}
})
t.Run("antisymmetry: if x <= y and y <= x then x == y", func(t *testing.T) {
testCases := []struct {
a, b []byte
}{
{[]byte("abc"), []byte("abc")},
{[]byte{}, []byte{}},
{[]byte{0x01}, []byte{0x01}},
}
for _, tc := range testCases {
cmp1 := Ord.Compare(tc.a, tc.b)
cmp2 := Ord.Compare(tc.b, tc.a)
if cmp1 <= 0 && cmp2 <= 0 {
assert.True(t, Ord.Equals(tc.a, tc.b),
"If %v <= %v and %v <= %v, they should be equal", tc.a, tc.b, tc.b, tc.a)
}
}
})
t.Run("transitivity: if x <= y and y <= z then x <= z", func(t *testing.T) {
x := []byte("a")
y := []byte("b")
z := []byte("c")
cmpXY := Ord.Compare(x, y)
cmpYZ := Ord.Compare(y, z)
cmpXZ := Ord.Compare(x, z)
if cmpXY <= 0 && cmpYZ <= 0 {
assert.True(t, cmpXZ <= 0,
"If %v <= %v and %v <= %v, then %v <= %v", x, y, y, z, x, z)
}
})
t.Run("totality: either x <= y or y <= x", func(t *testing.T) {
testCases := []struct {
a, b []byte
}{
{[]byte("abc"), []byte("abd")},
{[]byte("xyz"), []byte("abc")},
{[]byte{}, []byte("a")},
{[]byte{0x01}, []byte{0x02}},
}
for _, tc := range testCases {
cmp1 := Ord.Compare(tc.a, tc.b)
cmp2 := Ord.Compare(tc.b, tc.a)
assert.True(t, cmp1 <= 0 || cmp2 <= 0,
"Either %v <= %v or %v <= %v must be true", tc.a, tc.b, tc.b, tc.a)
}
})
}
// TestEdgeCases tests edge cases and boundary conditions
func TestEdgeCases(t *testing.T) {
t.Run("very large byte slices", func(t *testing.T) {
large := make([]byte, 1000000)
for i := range large {
large[i] = byte(i % 256)
}
size := Size(large)
assert.Equal(t, 1000000, size)
str := ToString(large)
assert.Equal(t, 1000000, len(str))
})
t.Run("concatenating many slices", func(t *testing.T) {
slices := make([][]byte, 100)
for i := range slices {
slices[i] = []byte{byte(i)}
}
result := ConcatAll(slices...)
assert.Equal(t, 100, Size(result))
})
t.Run("null bytes in slice", func(t *testing.T) {
data := []byte{0x00, 0x01, 0x00, 0x02}
size := Size(data)
assert.Equal(t, 4, size)
str := ToString(data)
assert.Equal(t, 4, len(str))
})
t.Run("comparing slices with null bytes", func(t *testing.T) {
a := []byte{0x00, 0x01}
b := []byte{0x00, 0x02}
assert.Equal(t, -1, Ord.Compare(a, b))
})
}
// TestMonoidConcatPerformance tests concatenation performance characteristics
func TestMonoidConcatPerformance(t *testing.T) {
t.Run("ConcatAll vs repeated Concat", func(t *testing.T) {
slices := [][]byte{
[]byte("a"),
[]byte("b"),
[]byte("c"),
[]byte("d"),
[]byte("e"),
}
// Using ConcatAll
result1 := ConcatAll(slices...)
// Using repeated Concat
result2 := Monoid.Empty()
for _, s := range slices {
result2 = Monoid.Concat(result2, s)
}
assert.Equal(t, result1, result2)
assert.Equal(t, []byte("abcde"), result1)
})
}
// TestRoundTrip tests round-trip conversions
func TestRoundTrip(t *testing.T) {
t.Run("string to bytes to string", func(t *testing.T) {
original := "Hello, World! 世界"
bytes := []byte(original)
result := ToString(bytes)
assert.Equal(t, original, result)
})
t.Run("bytes to string to bytes", func(t *testing.T) {
original := []byte{0x48, 0x65, 0x6c, 0x6c, 0x6f}
str := ToString(original)
result := []byte(str)
assert.Equal(t, original, result)
})
}
// TestConcatAllVariadic tests ConcatAll with various argument counts
func TestConcatAllVariadic(t *testing.T) {
t.Run("zero arguments", func(t *testing.T) {
result := ConcatAll()
assert.Equal(t, []byte{}, result)
})
t.Run("one argument", func(t *testing.T) {
result := ConcatAll([]byte("test"))
assert.Equal(t, []byte("test"), result)
})
t.Run("two arguments", func(t *testing.T) {
result := ConcatAll([]byte("hello"), []byte("world"))
assert.Equal(t, []byte("helloworld"), result)
})
t.Run("many arguments", func(t *testing.T) {
result := ConcatAll(
[]byte("a"),
[]byte("b"),
[]byte("c"),
[]byte("d"),
[]byte("e"),
[]byte("f"),
[]byte("g"),
[]byte("h"),
[]byte("i"),
[]byte("j"),
)
assert.Equal(t, []byte("abcdefghij"), result)
})
}
// Benchmark tests
func BenchmarkToString(b *testing.B) {
data := []byte("Hello, World!")
b.Run("small", func(b *testing.B) {
for i := 0; i < b.N; i++ {
_ = ToString(data)
}
})
b.Run("large", func(b *testing.B) {
large := make([]byte, 10000)
for i := range large {
large[i] = byte(i % 256)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
_ = ToString(large)
}
})
}
func BenchmarkSize(b *testing.B) {
data := []byte("Hello, World!")
for i := 0; i < b.N; i++ {
_ = Size(data)
}
}
func BenchmarkMonoidConcat(b *testing.B) {
a := []byte("Hello")
c := []byte(" World")
b.Run("small slices", func(b *testing.B) {
for i := 0; i < b.N; i++ {
_ = Monoid.Concat(a, c)
}
})
b.Run("large slices", func(b *testing.B) {
large1 := make([]byte, 10000)
large2 := make([]byte, 10000)
b.ResetTimer()
for i := 0; i < b.N; i++ {
_ = Monoid.Concat(large1, large2)
}
})
}
func BenchmarkConcatAll(b *testing.B) {
slices := [][]byte{
[]byte("Hello"),
[]byte(" "),
[]byte("World"),
[]byte("!"),
}
b.Run("few slices", func(b *testing.B) {
for i := 0; i < b.N; i++ {
_ = ConcatAll(slices...)
}
})
b.Run("many slices", func(b *testing.B) {
many := make([][]byte, 100)
for i := range many {
many[i] = []byte{byte(i)}
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
_ = ConcatAll(many...)
}
})
}
func BenchmarkOrdCompare(b *testing.B) {
a := []byte("abc")
c := []byte("abd")
b.Run("equal", func(b *testing.B) {
for i := 0; i < b.N; i++ {
_ = Ord.Compare(a, a)
}
})
b.Run("different", func(b *testing.B) {
for i := 0; i < b.N; i++ {
_ = Ord.Compare(a, c)
}
})
b.Run("large slices", func(b *testing.B) {
large1 := make([]byte, 10000)
large2 := make([]byte, 10000)
large2[9999] = 1
b.ResetTimer()
for i := 0; i < b.N; i++ {
_ = Ord.Compare(large1, large2)
}
})
}
// Example tests
func ExampleEmpty() {
empty := Empty()
@@ -219,3 +512,17 @@ func ExampleConcatAll() {
// Output:
}
func ExampleMonoid_concat() {
result := Monoid.Concat([]byte("Hello"), []byte(" World"))
println(string(result)) // Hello World
// Output:
}
func ExampleOrd_compare() {
cmp := Ord.Compare([]byte("abc"), []byte("abd"))
println(cmp) // -1 (abc < abd)
// Output:
}

v2/bytes/coverage.out Normal file

@@ -0,0 +1,4 @@
mode: set
github.com/IBM/fp-go/v2/bytes/bytes.go:55.21,57.2 1 1
github.com/IBM/fp-go/v2/bytes/bytes.go:111.32,113.2 1 1
github.com/IBM/fp-go/v2/bytes/bytes.go:175.26,177.2 1 1


@@ -23,12 +23,219 @@ import (
)
var (
// monoid for byte arrays
// Monoid is the Monoid instance for byte slices.
//
// This Monoid combines byte slices through concatenation, with an empty
// byte slice as the identity element. It satisfies the monoid laws:
//
// Identity laws:
// - Monoid.Concat(Monoid.Empty(), x) == x (left identity)
// - Monoid.Concat(x, Monoid.Empty()) == x (right identity)
//
// Associativity law:
// - Monoid.Concat(Monoid.Concat(a, b), c) == Monoid.Concat(a, Monoid.Concat(b, c))
//
// Operations:
// - Empty(): Returns an empty byte slice []byte{}
// - Concat(a, b []byte): Concatenates two byte slices
//
// Example - Basic concatenation:
//
// result := Monoid.Concat([]byte("Hello"), []byte(" World"))
// // result: []byte("Hello World")
//
// Example - Identity element:
//
// empty := Monoid.Empty()
// data := []byte("test")
// result1 := Monoid.Concat(empty, data) // []byte("test")
// result2 := Monoid.Concat(data, empty) // []byte("test")
//
// Example - Building byte buffers:
//
// buffer := Monoid.Empty()
// buffer = Monoid.Concat(buffer, []byte("Line 1\n"))
// buffer = Monoid.Concat(buffer, []byte("Line 2\n"))
// buffer = Monoid.Concat(buffer, []byte("Line 3\n"))
//
// Example - Associativity:
//
// a := []byte("a")
// b := []byte("b")
// c := []byte("c")
// left := Monoid.Concat(Monoid.Concat(a, b), c) // []byte("abc")
// right := Monoid.Concat(a, Monoid.Concat(b, c)) // []byte("abc")
// // left == right
//
// See also:
// - ConcatAll: For concatenating multiple byte slices at once
// - Empty(): Convenience function for getting empty byte slice
Monoid = A.Monoid[byte]()
// ConcatAll concatenates all bytes
// ConcatAll efficiently concatenates multiple byte slices into a single slice.
//
// This function takes a variadic number of byte slices and combines them
// into a single byte slice. It pre-allocates the exact amount of memory
// needed, making it more efficient than repeated concatenation.
//
// Parameters:
// - slices: Zero or more byte slices to concatenate
//
// Returns:
// - A new byte slice containing all input slices concatenated in order
//
// Performance:
//
// ConcatAll is more efficient than using Monoid.Concat repeatedly because
// it calculates the total size upfront and allocates memory once, avoiding
// multiple allocations and copies.
//
// Example - Basic usage:
//
// result := ConcatAll(
// []byte("Hello"),
// []byte(" "),
// []byte("World"),
// )
// // result: []byte("Hello World")
//
// Example - Empty input:
//
// result := ConcatAll()
// // result: []byte{}
//
// Example - Single slice:
//
// result := ConcatAll([]byte("test"))
// // result: []byte("test")
//
// Example - Building protocol messages:
//
// import "encoding/binary"
//
// header := []byte{0x01, 0x02}
// length := make([]byte, 4)
// binary.BigEndian.PutUint32(length, 100)
// payload := []byte("data")
// footer := []byte{0xFF}
//
// message := ConcatAll(header, length, payload, footer)
//
// Example - With empty slices:
//
// result := ConcatAll(
// []byte("a"),
// []byte{},
// []byte("b"),
// []byte{},
// []byte("c"),
// )
// // result: []byte("abc")
//
// Example - Building CSV line:
//
// fields := [][]byte{
// []byte("John"),
// []byte("Doe"),
// []byte("30"),
// }
// separator := []byte(",")
//
// // Interleave fields with separators
// parts := [][]byte{
// fields[0], separator,
// fields[1], separator,
// fields[2],
// }
// line := ConcatAll(parts...)
// // line: []byte("John,Doe,30")
//
// See also:
// - Monoid.Concat: For concatenating exactly two byte slices
// - bytes.Join: Standard library function for joining with separator
ConcatAll = A.ArrayConcatAll[byte]
// Ord implements the default ordering on bytes
// Ord is the Ord instance for byte slices providing lexicographic ordering.
//
// This Ord instance compares byte slices lexicographically (dictionary order),
// comparing bytes from left to right until a difference is found or one slice
// ends. It uses the standard library's bytes.Compare and bytes.Equal functions.
//
// Comparison rules:
// - Compares byte-by-byte from left to right
// - First differing byte determines the order
// - Shorter slice is less than longer slice if all bytes match
// - Empty slice is less than any non-empty slice
//
// Operations:
// - Compare(a, b []byte) int: Returns -1 if a < b, 0 if a == b, 1 if a > b
// - Equals(a, b []byte) bool: Returns true if slices are equal
//
// Example - Basic comparison:
//
// cmp := Ord.Compare([]byte("abc"), []byte("abd"))
// // cmp: -1 (abc < abd)
//
// cmp = Ord.Compare([]byte("xyz"), []byte("abc"))
// // cmp: 1 (xyz > abc)
//
// cmp = Ord.Compare([]byte("test"), []byte("test"))
// // cmp: 0 (equal)
//
// Example - Length differences:
//
// cmp := Ord.Compare([]byte("ab"), []byte("abc"))
// // cmp: -1 (shorter is less)
//
// cmp = Ord.Compare([]byte("abc"), []byte("ab"))
// // cmp: 1 (longer is greater)
//
// Example - Empty slices:
//
// cmp := Ord.Compare([]byte{}, []byte("a"))
// // cmp: -1 (empty is less)
//
// cmp = Ord.Compare([]byte{}, []byte{})
// // cmp: 0 (both empty)
//
// Example - Equality check:
//
// equal := Ord.Equals([]byte("test"), []byte("test"))
// // equal: true
//
// equal = Ord.Equals([]byte("test"), []byte("Test"))
// // equal: false (case-sensitive)
//
// Example - Sorting byte slices:
//
// import "github.com/IBM/fp-go/v2/array"
//
// data := [][]byte{
// []byte("zebra"),
// []byte("apple"),
// []byte("mango"),
// }
//
// sorted := array.Sort(Ord)(data)
// // sorted: [[]byte("apple"), []byte("mango"), []byte("zebra")]
//
// Example - Binary data comparison:
//
// cmp := Ord.Compare([]byte{0x01, 0x02}, []byte{0x01, 0x03})
// // cmp: -1 (0x02 < 0x03)
//
// Example - Finding minimum:
//
// import O "github.com/IBM/fp-go/v2/ord"
//
// a := []byte("xyz")
// b := []byte("abc")
// min := O.Min(Ord)(a, b)
// // min: []byte("abc")
//
// See also:
// - bytes.Compare: Standard library comparison function
// - bytes.Equal: Standard library equality function
// - array.Sort: For sorting slices using an Ord instance
Ord = O.MakeOrd(bytes.Compare, bytes.Equal)
)


@@ -53,17 +53,20 @@ var (
// structInfo holds information about a struct that needs lens generation
type structInfo struct {
Name string
Fields []fieldInfo
Imports map[string]string // package path -> alias
Name string
TypeParams string // e.g., "[T any]" or "[K comparable, V any]" - for type declarations
TypeParamNames string // e.g., "[T]" or "[K, V]" - for type usage in function signatures
Fields []fieldInfo
Imports map[string]string // package path -> alias
}
// fieldInfo holds information about a struct field
type fieldInfo struct {
Name string
TypeName string
BaseType string // TypeName without leading * for pointer types
IsOptional bool // true if field is a pointer or has json omitempty tag
Name string
TypeName string
BaseType string // TypeName without leading * for pointer types
IsOptional bool // true if field is a pointer or has json omitempty tag
IsComparable bool // true if the type is comparable (can use ==)
}
// templateData holds data for template rendering
@@ -74,64 +77,95 @@ type templateData struct {
const lensStructTemplate = `
// {{.Name}}Lenses provides lenses for accessing fields of {{.Name}}
type {{.Name}}Lenses struct {
type {{.Name}}Lenses{{.TypeParams}} struct {
// mandatory fields
{{- range .Fields}}
{{.Name}} {{if .IsOptional}}LO.LensO[{{$.Name}}, {{.TypeName}}]{{else}}L.Lens[{{$.Name}}, {{.TypeName}}]{{end}}
{{.Name}} L.Lens[{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{- end}}
// optional fields
{{- range .Fields}}
{{- if .IsComparable}}
{{.Name}}O LO.LensO[{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{- end}}
{{- end}}
}
// {{.Name}}RefLenses provides lenses for accessing fields of {{.Name}} via a reference to {{.Name}}
type {{.Name}}RefLenses struct {
type {{.Name}}RefLenses{{.TypeParams}} struct {
// mandatory fields
{{- range .Fields}}
{{.Name}} {{if .IsOptional}}LO.LensO[*{{$.Name}}, {{.TypeName}}]{{else}}L.Lens[*{{$.Name}}, {{.TypeName}}]{{end}}
{{.Name}} L.Lens[*{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{- end}}
// optional fields
{{- range .Fields}}
{{- if .IsComparable}}
{{.Name}}O LO.LensO[*{{$.Name}}{{$.TypeParamNames}}, {{.TypeName}}]
{{- end}}
{{- end}}
}
`
const lensConstructorTemplate = `
// Make{{.Name}}Lenses creates a new {{.Name}}Lenses with lenses for all fields
func Make{{.Name}}Lenses() {{.Name}}Lenses {
func Make{{.Name}}Lenses{{.TypeParams}}() {{.Name}}Lenses{{.TypeParamNames}} {
// mandatory lenses
{{- range .Fields}}
{{- if .IsOptional}}
iso{{.Name}} := I.FromZero[{{.TypeName}}]()
lens{{.Name}} := L.MakeLens(
func(s {{$.Name}}{{$.TypeParamNames}}) {{.TypeName}} { return s.{{.Name}} },
func(s {{$.Name}}{{$.TypeParamNames}}, v {{.TypeName}}) {{$.Name}}{{$.TypeParamNames}} { s.{{.Name}} = v; return s },
)
{{- end}}
// optional lenses
{{- range .Fields}}
{{- if .IsComparable}}
lens{{.Name}}O := LO.FromIso[{{$.Name}}{{$.TypeParamNames}}](IO.FromZero[{{.TypeName}}]())(lens{{.Name}})
{{- end}}
{{- end}}
return {{.Name}}Lenses{
return {{.Name}}Lenses{{.TypeParamNames}}{
// mandatory lenses
{{- range .Fields}}
{{- if .IsOptional}}
{{.Name}}: L.MakeLens(
func(s {{$.Name}}) O.Option[{{.TypeName}}] { return iso{{.Name}}.Get(s.{{.Name}}) },
func(s {{$.Name}}, v O.Option[{{.TypeName}}]) {{$.Name}} { s.{{.Name}} = iso{{.Name}}.ReverseGet(v); return s },
),
{{- else}}
{{.Name}}: L.MakeLens(
func(s {{$.Name}}) {{.TypeName}} { return s.{{.Name}} },
func(s {{$.Name}}, v {{.TypeName}}) {{$.Name}} { s.{{.Name}} = v; return s },
),
{{.Name}}: lens{{.Name}},
{{- end}}
// optional lenses
{{- range .Fields}}
{{- if .IsComparable}}
{{.Name}}O: lens{{.Name}}O,
{{- end}}
{{- end}}
}
}
// Make{{.Name}}RefLenses creates a new {{.Name}}RefLenses with lenses for all fields
func Make{{.Name}}RefLenses() {{.Name}}RefLenses {
func Make{{.Name}}RefLenses{{.TypeParams}}() {{.Name}}RefLenses{{.TypeParamNames}} {
// mandatory lenses
{{- range .Fields}}
{{- if .IsOptional}}
iso{{.Name}} := I.FromZero[{{.TypeName}}]()
{{- end}}
{{- end}}
return {{.Name}}RefLenses{
{{- range .Fields}}
{{- if .IsOptional}}
{{.Name}}: L.MakeLensRef(
func(s *{{$.Name}}) O.Option[{{.TypeName}}] { return iso{{.Name}}.Get(s.{{.Name}}) },
func(s *{{$.Name}}, v O.Option[{{.TypeName}}]) *{{$.Name}} { s.{{.Name}} = iso{{.Name}}.ReverseGet(v); return s },
),
{{- if .IsComparable}}
lens{{.Name}} := L.MakeLensStrict(
func(s *{{$.Name}}{{$.TypeParamNames}}) {{.TypeName}} { return s.{{.Name}} },
func(s *{{$.Name}}{{$.TypeParamNames}}, v {{.TypeName}}) *{{$.Name}}{{$.TypeParamNames}} { s.{{.Name}} = v; return s },
)
{{- else}}
{{.Name}}: L.MakeLensRef(
func(s *{{$.Name}}) {{.TypeName}} { return s.{{.Name}} },
func(s *{{$.Name}}, v {{.TypeName}}) *{{$.Name}} { s.{{.Name}} = v; return s },
),
lens{{.Name}} := L.MakeLensRef(
func(s *{{$.Name}}{{$.TypeParamNames}}) {{.TypeName}} { return s.{{.Name}} },
func(s *{{$.Name}}{{$.TypeParamNames}}, v {{.TypeName}}) *{{$.Name}}{{$.TypeParamNames}} { s.{{.Name}} = v; return s },
)
{{- end}}
{{- end}}
// optional lenses
{{- range .Fields}}
{{- if .IsComparable}}
lens{{.Name}}O := LO.FromIso[*{{$.Name}}{{$.TypeParamNames}}](IO.FromZero[{{.TypeName}}]())(lens{{.Name}})
{{- end}}
{{- end}}
return {{.Name}}RefLenses{{.TypeParamNames}}{
// mandatory lenses
{{- range .Fields}}
{{.Name}}: lens{{.Name}},
{{- end}}
// optional lenses
{{- range .Fields}}
{{- if .IsComparable}}
{{.Name}}O: lens{{.Name}}O,
{{- end}}
{{- end}}
}
@@ -257,6 +291,259 @@ func isPointerType(expr ast.Expr) bool {
return ok
}
// isComparableType checks if a type expression represents a comparable type.
// Comparable types in Go include:
// - Basic types (bool, numeric types, string)
// - Pointer types
// - Channel types
// - Interface types
// - Structs where all fields are comparable
// - Arrays where the element type is comparable
//
// Non-comparable types include:
// - Slices
// - Maps
// - Functions
//
// typeParams is a map of type parameter names to their constraints (e.g., "T" -> "any", "K" -> "comparable")
func isComparableType(expr ast.Expr, typeParams map[string]string) bool {
switch t := expr.(type) {
case *ast.Ident:
// Check if this is a type parameter
if constraint, isTypeParam := typeParams[t.Name]; isTypeParam {
// Type parameter - check its constraint
return constraint == "comparable"
}
// Basic types and named types
// We assume named types are comparable unless they're known non-comparable types
name := t.Name
// The built-in error type is an interface; interface values are comparable
if name == "error" {
return true
}
// Most basic types and named types are comparable.
// We can't determine whether a custom named type is comparable without full type checking,
// so we optimistically assume that it is.
return true
case *ast.StarExpr:
// Pointer types are always comparable
return true
case *ast.ArrayType:
// Arrays are comparable if their element type is comparable
if t.Len == nil {
// This is a slice (no length), slices are not comparable
return false
}
// Fixed-size array, check element type
return isComparableType(t.Elt, typeParams)
case *ast.MapType:
// Maps are not comparable
return false
case *ast.FuncType:
// Functions are not comparable
return false
case *ast.InterfaceType:
// Interface types are comparable
return true
case *ast.StructType:
// Structs are comparable if all fields are comparable
// We can't easily determine this without full type information,
// so we conservatively return false for struct literals
return false
case *ast.SelectorExpr:
// Qualified identifier (e.g., pkg.Type)
// We can't determine comparability without type information
// Check for known non-comparable types from standard library
if ident, ok := t.X.(*ast.Ident); ok {
pkgName := ident.Name
typeName := t.Sel.Name
// Check for known non-comparable types
if pkgName == "context" && typeName == "Context" {
// context.Context is an interface, which is comparable
return true
}
// For other qualified types we optimistically assume they are comparable
}
return true
case *ast.IndexExpr, *ast.IndexListExpr:
// Generic types - we can't determine comparability without type information
// For common generic types, we can make educated guesses
var baseExpr ast.Expr
if idx, ok := t.(*ast.IndexExpr); ok {
baseExpr = idx.X
} else if idxList, ok := t.(*ast.IndexListExpr); ok {
baseExpr = idxList.X
}
if sel, ok := baseExpr.(*ast.SelectorExpr); ok {
if ident, ok := sel.X.(*ast.Ident); ok {
pkgName := ident.Name
typeName := sel.Sel.Name
// Check for known non-comparable generic types
if pkgName == "option" && typeName == "Option" {
// Option types are not comparable (they contain a slice internally)
return false
}
if pkgName == "either" && typeName == "Either" {
// Either types are not comparable
return false
}
}
}
// For other generic types, conservatively assume not comparable
log.Printf("Not comparable type: %v\n", t)
return false
case *ast.ChanType:
// Channel types are comparable
return true
default:
// Unknown type, conservatively assume not comparable
return false
}
}
// embeddedFieldResult holds both the field info and its AST type for import extraction
type embeddedFieldResult struct {
fieldInfo fieldInfo
fieldType ast.Expr
}
// extractEmbeddedFields extracts fields from an embedded struct type
// It returns a slice of embeddedFieldResult for all exported fields in the embedded struct
// typeParamsMap contains the type parameters of the parent struct (for checking comparability)
func extractEmbeddedFields(embedType ast.Expr, fileImports map[string]string, file *ast.File, typeParamsMap map[string]string) []embeddedFieldResult {
var results []embeddedFieldResult
// Get the type name of the embedded field
var typeName string
var typeIdent *ast.Ident
switch t := embedType.(type) {
case *ast.Ident:
// Direct embedded type: type MyStruct struct { EmbeddedType }
typeName = t.Name
typeIdent = t
case *ast.StarExpr:
// Pointer embedded type: type MyStruct struct { *EmbeddedType }
if ident, ok := t.X.(*ast.Ident); ok {
typeName = ident.Name
typeIdent = ident
}
case *ast.SelectorExpr:
// Qualified embedded type: type MyStruct struct { pkg.EmbeddedType }
// We can't easily resolve this without full type information
// For now, skip these
return results
}
if typeName == "" || typeIdent == nil {
return results
}
// Find the struct definition in the same file
var embeddedStructType *ast.StructType
ast.Inspect(file, func(n ast.Node) bool {
if ts, ok := n.(*ast.TypeSpec); ok {
if ts.Name.Name == typeName {
if st, ok := ts.Type.(*ast.StructType); ok {
embeddedStructType = st
return false
}
}
}
return true
})
if embeddedStructType == nil {
// Struct not found in this file, might be from another package
return results
}
// Extract fields from the embedded struct
for _, field := range embeddedStructType.Fields.List {
// Skip embedded fields within embedded structs (for now, to avoid infinite recursion)
if len(field.Names) == 0 {
continue
}
for _, name := range field.Names {
// Only export lenses for exported fields
if name.IsExported() {
fieldTypeName := getTypeName(field.Type)
isOptional := false
baseType := fieldTypeName
// Check if field is optional
if isPointerType(field.Type) {
isOptional = true
baseType = strings.TrimPrefix(fieldTypeName, "*")
} else if hasOmitEmpty(field.Tag) {
isOptional = true
}
// Check if the type is comparable
isComparable := isComparableType(field.Type, typeParamsMap)
results = append(results, embeddedFieldResult{
fieldInfo: fieldInfo{
Name: name.Name,
TypeName: fieldTypeName,
BaseType: baseType,
IsOptional: isOptional,
IsComparable: isComparable,
},
fieldType: field.Type,
})
}
}
}
return results
}
// extractTypeParams extracts type parameters from a type spec
// Returns two strings: full params like "[T any]" and names only like "[T]"
func extractTypeParams(typeSpec *ast.TypeSpec) (string, string) {
if typeSpec.TypeParams == nil || len(typeSpec.TypeParams.List) == 0 {
return "", ""
}
var params []string
var names []string
for _, field := range typeSpec.TypeParams.List {
for _, name := range field.Names {
constraint := getTypeName(field.Type)
params = append(params, name.Name+" "+constraint)
names = append(names, name.Name)
}
}
fullParams := "[" + strings.Join(params, ", ") + "]"
nameParams := "[" + strings.Join(names, ", ") + "]"
return fullParams, nameParams
}
// buildTypeParamsMap creates a map of type parameter names to their constraints
// e.g., for "type Box[T any, K comparable]", returns {"T": "any", "K": "comparable"}
func buildTypeParamsMap(typeSpec *ast.TypeSpec) map[string]string {
typeParamsMap := make(map[string]string)
if typeSpec.TypeParams == nil || len(typeSpec.TypeParams.List) == 0 {
return typeParamsMap
}
for _, field := range typeSpec.TypeParams.List {
constraint := getTypeName(field.Type)
for _, name := range field.Names {
typeParamsMap[name.Name] = constraint
}
}
return typeParamsMap
}
// parseFile parses a Go file and extracts structs with lens annotations
func parseFile(filename string) ([]structInfo, string, error) {
fset := token.NewFileSet()
@@ -320,9 +607,27 @@ func parseFile(filename string) ([]structInfo, string, error) {
var fields []fieldInfo
structImports := make(map[string]string)
// Build type parameters map for this struct
typeParamsMap := buildTypeParamsMap(typeSpec)
for _, field := range structType.Fields.List {
if len(field.Names) == 0 {
// Embedded field, skip for now
// Embedded field - promote its fields
embeddedResults := extractEmbeddedFields(field.Type, fileImports, node, typeParamsMap)
for _, embResult := range embeddedResults {
// Extract imports from embedded field's type
fieldImports := make(map[string]string)
extractImports(embResult.fieldType, fieldImports)
// Resolve package names to full import paths
for pkgName := range fieldImports {
if importPath, ok := fileImports[pkgName]; ok {
structImports[importPath] = pkgName
}
}
fields = append(fields, embResult.fieldInfo)
}
continue
}
for _, name := range field.Names {
@@ -331,6 +636,7 @@ func parseFile(filename string) ([]structInfo, string, error) {
typeName := getTypeName(field.Type)
isOptional := false
baseType := typeName
isComparable := false
// Check if field is optional:
// 1. Pointer types are always optional
@@ -344,6 +650,11 @@ func parseFile(filename string) ([]structInfo, string, error) {
isOptional = true
}
// Check if the type is comparable (for non-optional fields)
// For optional fields, we don't need to check since they use LensO
isComparable = isComparableType(field.Type, typeParamsMap)
// log.Printf("field %s, type: %v, isComparable: %b\n", name, field.Type, isComparable)
// Extract imports from this field's type
fieldImports := make(map[string]string)
extractImports(field.Type, fieldImports)
@@ -356,20 +667,24 @@ func parseFile(filename string) ([]structInfo, string, error) {
}
fields = append(fields, fieldInfo{
Name: name.Name,
TypeName: typeName,
BaseType: baseType,
IsOptional: isOptional,
Name: name.Name,
TypeName: typeName,
BaseType: baseType,
IsOptional: isOptional,
IsComparable: isComparable,
})
}
}
}
if len(fields) > 0 {
typeParams, typeParamNames := extractTypeParams(typeSpec)
structs = append(structs, structInfo{
Name: typeSpec.Name.Name,
Fields: fields,
Imports: structImports,
Name: typeSpec.Name.Name,
TypeParams: typeParams,
TypeParamNames: typeParamNames,
Fields: fields,
Imports: structImports,
})
}
@@ -469,8 +784,8 @@ func generateLensHelpers(dir, filename string, verbose bool) error {
// Standard fp-go imports always needed
f.WriteString("\tL \"github.com/IBM/fp-go/v2/optics/lens\"\n")
f.WriteString("\tLO \"github.com/IBM/fp-go/v2/optics/lens/option\"\n")
f.WriteString("\tO \"github.com/IBM/fp-go/v2/option\"\n")
f.WriteString("\tI \"github.com/IBM/fp-go/v2/optics/iso/option\"\n")
// f.WriteString("\tO \"github.com/IBM/fp-go/v2/option\"\n")
f.WriteString("\tIO \"github.com/IBM/fp-go/v2/optics/iso/option\"\n")
// Add additional imports collected from field types
for importPath, alias := range allImports {


@@ -168,6 +168,91 @@ func TestIsPointerType(t *testing.T) {
}
}
func TestIsComparableType(t *testing.T) {
tests := []struct {
name string
code string
expected bool
}{
{
name: "basic type - string",
code: "type T struct { F string }",
expected: true,
},
{
name: "basic type - int",
code: "type T struct { F int }",
expected: true,
},
{
name: "basic type - bool",
code: "type T struct { F bool }",
expected: true,
},
{
name: "pointer type",
code: "type T struct { F *string }",
expected: true,
},
{
name: "slice type - not comparable",
code: "type T struct { F []string }",
expected: false,
},
{
name: "map type - not comparable",
code: "type T struct { F map[string]int }",
expected: false,
},
{
name: "array type - comparable if element is",
code: "type T struct { F [5]int }",
expected: true,
},
{
name: "interface type",
code: "type T struct { F interface{} }",
expected: true,
},
{
name: "channel type",
code: "type T struct { F chan int }",
expected: true,
},
{
name: "function type - not comparable",
code: "type T struct { F func() }",
expected: false,
},
{
name: "struct literal - conservatively not comparable",
code: "type T struct { F struct{ X int } }",
expected: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
fset := token.NewFileSet()
file, err := parser.ParseFile(fset, "", "package test\n"+tt.code, 0)
require.NoError(t, err)
var fieldType ast.Expr
ast.Inspect(file, func(n ast.Node) bool {
if field, ok := n.(*ast.Field); ok && len(field.Names) > 0 {
fieldType = field.Type
return false
}
return true
})
require.NotNil(t, fieldType)
result := isComparableType(fieldType, map[string]string{})
assert.Equal(t, tt.expected, result)
})
}
}
func TestHasOmitEmpty(t *testing.T) {
tests := []struct {
name string
@@ -337,6 +422,167 @@ type Config struct {
assert.False(t, config.Fields[4].IsOptional, "Required field without omitempty should not be optional")
}
func TestParseFileWithComparableTypes(t *testing.T) {
// Create a temporary test file
tmpDir := t.TempDir()
testFile := filepath.Join(tmpDir, "test.go")
testCode := `package testpkg
// fp-go:Lens
type TypeTest struct {
Name string
Age int
Pointer *string
Slice []string
Map map[string]int
Channel chan int
}
`
err := os.WriteFile(testFile, []byte(testCode), 0644)
require.NoError(t, err)
// Parse the file
structs, pkg, err := parseFile(testFile)
require.NoError(t, err)
// Verify results
assert.Equal(t, "testpkg", pkg)
assert.Len(t, structs, 1)
// Check TypeTest struct
typeTest := structs[0]
assert.Equal(t, "TypeTest", typeTest.Name)
assert.Len(t, typeTest.Fields, 6)
// Name - string is comparable
assert.Equal(t, "Name", typeTest.Fields[0].Name)
assert.Equal(t, "string", typeTest.Fields[0].TypeName)
assert.False(t, typeTest.Fields[0].IsOptional)
assert.True(t, typeTest.Fields[0].IsComparable, "string should be comparable")
// Age - int is comparable
assert.Equal(t, "Age", typeTest.Fields[1].Name)
assert.Equal(t, "int", typeTest.Fields[1].TypeName)
assert.False(t, typeTest.Fields[1].IsOptional)
assert.True(t, typeTest.Fields[1].IsComparable, "int should be comparable")
// Pointer - pointer is optional, IsComparable not checked for optional fields
assert.Equal(t, "Pointer", typeTest.Fields[2].Name)
assert.Equal(t, "*string", typeTest.Fields[2].TypeName)
assert.True(t, typeTest.Fields[2].IsOptional)
// Slice - not comparable
assert.Equal(t, "Slice", typeTest.Fields[3].Name)
assert.Equal(t, "[]string", typeTest.Fields[3].TypeName)
assert.False(t, typeTest.Fields[3].IsOptional)
assert.False(t, typeTest.Fields[3].IsComparable, "slice should not be comparable")
// Map - not comparable
assert.Equal(t, "Map", typeTest.Fields[4].Name)
assert.Equal(t, "map[string]int", typeTest.Fields[4].TypeName)
assert.False(t, typeTest.Fields[4].IsOptional)
assert.False(t, typeTest.Fields[4].IsComparable, "map should not be comparable")
// Channel - comparable (note: getTypeName returns "any" for channel types, but isComparableType correctly identifies them)
assert.Equal(t, "Channel", typeTest.Fields[5].Name)
assert.Equal(t, "any", typeTest.Fields[5].TypeName) // getTypeName doesn't handle chan types specifically
assert.False(t, typeTest.Fields[5].IsOptional)
assert.True(t, typeTest.Fields[5].IsComparable, "channel should be comparable")
}
func TestLensRefTemplatesWithComparable(t *testing.T) {
s := structInfo{
Name: "TestStruct",
Fields: []fieldInfo{
{Name: "Name", TypeName: "string", IsOptional: false, IsComparable: true},
{Name: "Age", TypeName: "int", IsOptional: false, IsComparable: true},
{Name: "Data", TypeName: "[]byte", IsOptional: false, IsComparable: false},
{Name: "Pointer", TypeName: "*string", IsOptional: true, IsComparable: false},
},
}
// Test constructor template for RefLenses
var constructorBuf bytes.Buffer
err := constructorTmpl.Execute(&constructorBuf, s)
require.NoError(t, err)
constructorStr := constructorBuf.String()
// Check that MakeLensStrict is used for comparable types in RefLenses
assert.Contains(t, constructorStr, "func MakeTestStructRefLenses() TestStructRefLenses")
// Name field - comparable, should use MakeLensStrict
assert.Contains(t, constructorStr, "lensName := L.MakeLensStrict(",
"comparable field Name should use MakeLensStrict in RefLenses")
// Age field - comparable, should use MakeLensStrict
assert.Contains(t, constructorStr, "lensAge := L.MakeLensStrict(",
"comparable field Age should use MakeLensStrict in RefLenses")
// Data field - not comparable, should use MakeLensRef
assert.Contains(t, constructorStr, "lensData := L.MakeLensRef(",
"non-comparable field Data should use MakeLensRef in RefLenses")
}
func TestGenerateLensHelpersWithComparable(t *testing.T) {
// Create a temporary directory with test files
tmpDir := t.TempDir()
testCode := `package testpkg
// fp-go:Lens
type TestStruct struct {
Name string
Count int
Data []byte
}
`
testFile := filepath.Join(tmpDir, "test.go")
err := os.WriteFile(testFile, []byte(testCode), 0644)
require.NoError(t, err)
// Generate lens code
outputFile := "gen.go"
err = generateLensHelpers(tmpDir, outputFile, false)
require.NoError(t, err)
// Verify the generated file exists
genPath := filepath.Join(tmpDir, outputFile)
_, err = os.Stat(genPath)
require.NoError(t, err)
// Read and verify the generated content
content, err := os.ReadFile(genPath)
require.NoError(t, err)
contentStr := string(content)
// Check for expected content in RefLenses
assert.Contains(t, contentStr, "MakeTestStructRefLenses")
// Name and Count are comparable, should use MakeLensStrict
assert.Contains(t, contentStr, "L.MakeLensStrict",
"comparable fields should use MakeLensStrict in RefLenses")
// Data is not comparable (slice), should use MakeLensRef
assert.Contains(t, contentStr, "L.MakeLensRef",
"non-comparable fields should use MakeLensRef in RefLenses")
// Verify the pattern appears for Name field (comparable)
namePattern := "lensName := L.MakeLensStrict("
assert.Contains(t, contentStr, namePattern,
"Name field should use MakeLensStrict")
// Verify the pattern appears for Data field (not comparable)
dataPattern := "lensData := L.MakeLensRef("
assert.Contains(t, contentStr, dataPattern,
"Data field should use MakeLensRef")
}
func TestGenerateLensHelpers(t *testing.T) {
// Create a temporary directory with test files
tmpDir := t.TempDir()
@@ -373,11 +619,11 @@ type TestStruct struct {
// Check for expected content
assert.Contains(t, contentStr, "package testpkg")
assert.Contains(t, contentStr, "Code generated by go generate")
assert.Contains(t, contentStr, "TestStructLens")
assert.Contains(t, contentStr, "MakeTestStructLens")
assert.Contains(t, contentStr, "TestStructLenses")
assert.Contains(t, contentStr, "MakeTestStructLenses")
assert.Contains(t, contentStr, "L.Lens[TestStruct, string]")
assert.Contains(t, contentStr, "LO.LensO[TestStruct, *int]")
assert.Contains(t, contentStr, "I.FromZero")
assert.Contains(t, contentStr, "IO.FromZero")
}
func TestGenerateLensHelpersNoAnnotations(t *testing.T) {
@@ -411,8 +657,8 @@ func TestLensTemplates(t *testing.T) {
s := structInfo{
Name: "TestStruct",
Fields: []fieldInfo{
{Name: "Name", TypeName: "string", IsOptional: false},
{Name: "Value", TypeName: "*int", IsOptional: true},
{Name: "Name", TypeName: "string", IsOptional: false, IsComparable: true},
{Name: "Value", TypeName: "*int", IsOptional: true, IsComparable: true},
},
}
@@ -424,7 +670,9 @@ func TestLensTemplates(t *testing.T) {
structStr := structBuf.String()
assert.Contains(t, structStr, "type TestStructLenses struct")
assert.Contains(t, structStr, "Name L.Lens[TestStruct, string]")
assert.Contains(t, structStr, "Value LO.LensO[TestStruct, *int]")
assert.Contains(t, structStr, "NameO LO.LensO[TestStruct, string]")
assert.Contains(t, structStr, "Value L.Lens[TestStruct, *int]")
assert.Contains(t, structStr, "ValueO LO.LensO[TestStruct, *int]")
// Test constructor template
var constructorBuf bytes.Buffer
@@ -434,19 +682,21 @@ func TestLensTemplates(t *testing.T) {
constructorStr := constructorBuf.String()
assert.Contains(t, constructorStr, "func MakeTestStructLenses() TestStructLenses")
assert.Contains(t, constructorStr, "return TestStructLenses{")
assert.Contains(t, constructorStr, "Name: L.MakeLens(")
assert.Contains(t, constructorStr, "Value: L.MakeLens(")
assert.Contains(t, constructorStr, "I.FromZero")
assert.Contains(t, constructorStr, "Name: lensName,")
assert.Contains(t, constructorStr, "NameO: lensNameO,")
assert.Contains(t, constructorStr, "Value: lensValue,")
assert.Contains(t, constructorStr, "ValueO: lensValueO,")
assert.Contains(t, constructorStr, "IO.FromZero")
}
func TestLensTemplatesWithOmitEmpty(t *testing.T) {
s := structInfo{
Name: "ConfigStruct",
Fields: []fieldInfo{
{Name: "Name", TypeName: "string", IsOptional: false},
{Name: "Value", TypeName: "string", IsOptional: true}, // non-pointer with omitempty
{Name: "Count", TypeName: "int", IsOptional: true}, // non-pointer with omitempty
{Name: "Pointer", TypeName: "*string", IsOptional: true}, // pointer
{Name: "Name", TypeName: "string", IsOptional: false, IsComparable: true},
{Name: "Value", TypeName: "string", IsOptional: true, IsComparable: true}, // non-pointer with omitempty
{Name: "Count", TypeName: "int", IsOptional: true, IsComparable: true}, // non-pointer with omitempty
{Name: "Pointer", TypeName: "*string", IsOptional: true, IsComparable: true}, // pointer
},
}
@@ -458,9 +708,13 @@ func TestLensTemplatesWithOmitEmpty(t *testing.T) {
structStr := structBuf.String()
assert.Contains(t, structStr, "type ConfigStructLenses struct")
assert.Contains(t, structStr, "Name L.Lens[ConfigStruct, string]")
assert.Contains(t, structStr, "Value LO.LensO[ConfigStruct, string]", "non-pointer with omitempty should use LensO")
assert.Contains(t, structStr, "Count LO.LensO[ConfigStruct, int]", "non-pointer with omitempty should use LensO")
assert.Contains(t, structStr, "Pointer LO.LensO[ConfigStruct, *string]")
assert.Contains(t, structStr, "NameO LO.LensO[ConfigStruct, string]")
assert.Contains(t, structStr, "Value L.Lens[ConfigStruct, string]")
assert.Contains(t, structStr, "ValueO LO.LensO[ConfigStruct, string]", "comparable non-pointer with omitempty should have optional lens")
assert.Contains(t, structStr, "Count L.Lens[ConfigStruct, int]")
assert.Contains(t, structStr, "CountO LO.LensO[ConfigStruct, int]", "comparable non-pointer with omitempty should have optional lens")
assert.Contains(t, structStr, "Pointer L.Lens[ConfigStruct, *string]")
assert.Contains(t, structStr, "PointerO LO.LensO[ConfigStruct, *string]")
// Test constructor template
var constructorBuf bytes.Buffer
@@ -469,9 +723,9 @@ func TestLensTemplatesWithOmitEmpty(t *testing.T) {
constructorStr := constructorBuf.String()
assert.Contains(t, constructorStr, "func MakeConfigStructLenses() ConfigStructLenses")
assert.Contains(t, constructorStr, "isoValue := I.FromZero[string]()")
assert.Contains(t, constructorStr, "isoCount := I.FromZero[int]()")
assert.Contains(t, constructorStr, "isoPointer := I.FromZero[*string]()")
assert.Contains(t, constructorStr, "IO.FromZero[string]()")
assert.Contains(t, constructorStr, "IO.FromZero[int]()")
assert.Contains(t, constructorStr, "IO.FromZero[*string]()")
}
func TestLensCommandFlags(t *testing.T) {
@@ -480,7 +734,7 @@ func TestLensCommandFlags(t *testing.T) {
assert.Equal(t, "lens", cmd.Name)
assert.Equal(t, "generate lens code for annotated structs", cmd.Usage)
assert.Contains(t, strings.ToLower(cmd.Description), "fp-go:lens")
assert.Contains(t, strings.ToLower(cmd.Description), "lenso")
assert.Contains(t, strings.ToLower(cmd.Description), "lenso", "Description should mention LensO for optional lenses")
// Check flags
assert.Len(t, cmd.Flags, 3)
@@ -501,3 +755,330 @@ func TestLensCommandFlags(t *testing.T) {
assert.True(t, hasFilename, "should have filename flag")
assert.True(t, hasVerbose, "should have verbose flag")
}
func TestParseFileWithEmbeddedStruct(t *testing.T) {
// Create a temporary test file
tmpDir := t.TempDir()
testFile := filepath.Join(tmpDir, "test.go")
testCode := `package testpkg
// Base struct to be embedded
type Base struct {
ID int
Name string
}
// fp-go:Lens
type Extended struct {
Base
Extra string
}
`
err := os.WriteFile(testFile, []byte(testCode), 0644)
require.NoError(t, err)
// Parse the file
structs, pkg, err := parseFile(testFile)
require.NoError(t, err)
// Verify results
assert.Equal(t, "testpkg", pkg)
assert.Len(t, structs, 1)
// Check Extended struct
extended := structs[0]
assert.Equal(t, "Extended", extended.Name)
assert.Len(t, extended.Fields, 3, "Should have 3 fields: ID, Name (from Base), and Extra")
// Check that embedded fields are promoted
fieldNames := make(map[string]bool)
for _, field := range extended.Fields {
fieldNames[field.Name] = true
}
assert.True(t, fieldNames["ID"], "Should have promoted ID field from Base")
assert.True(t, fieldNames["Name"], "Should have promoted Name field from Base")
assert.True(t, fieldNames["Extra"], "Should have Extra field")
}
func TestGenerateLensHelpersWithEmbeddedStruct(t *testing.T) {
// Create a temporary directory with test files
tmpDir := t.TempDir()
testCode := `package testpkg
// Base struct to be embedded
type Address struct {
Street string
City string
}
// fp-go:Lens
type Person struct {
Address
Name string
Age int
}
`
testFile := filepath.Join(tmpDir, "test.go")
err := os.WriteFile(testFile, []byte(testCode), 0644)
require.NoError(t, err)
// Generate lens code
outputFile := "gen.go"
err = generateLensHelpers(tmpDir, outputFile, false)
require.NoError(t, err)
// Verify the generated file exists
genPath := filepath.Join(tmpDir, outputFile)
_, err = os.Stat(genPath)
require.NoError(t, err)
// Read and verify the generated content
content, err := os.ReadFile(genPath)
require.NoError(t, err)
contentStr := string(content)
// Check for expected content
assert.Contains(t, contentStr, "package testpkg")
assert.Contains(t, contentStr, "PersonLenses")
assert.Contains(t, contentStr, "MakePersonLenses")
// Check that embedded fields are included
assert.Contains(t, contentStr, "Street L.Lens[Person, string]", "Should have lens for embedded Street field")
assert.Contains(t, contentStr, "City L.Lens[Person, string]", "Should have lens for embedded City field")
assert.Contains(t, contentStr, "Name L.Lens[Person, string]", "Should have lens for Name field")
assert.Contains(t, contentStr, "Age L.Lens[Person, int]", "Should have lens for Age field")
// Check that optional lenses are also generated for embedded fields
assert.Contains(t, contentStr, "StreetO LO.LensO[Person, string]")
assert.Contains(t, contentStr, "CityO LO.LensO[Person, string]")
}
func TestParseFileWithPointerEmbeddedStruct(t *testing.T) {
// Create a temporary test file
tmpDir := t.TempDir()
testFile := filepath.Join(tmpDir, "test.go")
testCode := `package testpkg
// Base struct to be embedded
type Metadata struct {
CreatedAt string
UpdatedAt string
}
// fp-go:Lens
type Document struct {
*Metadata
Title string
Content string
}
`
err := os.WriteFile(testFile, []byte(testCode), 0644)
require.NoError(t, err)
// Parse the file
structs, pkg, err := parseFile(testFile)
require.NoError(t, err)
// Verify results
assert.Equal(t, "testpkg", pkg)
assert.Len(t, structs, 1)
// Check Document struct
doc := structs[0]
assert.Equal(t, "Document", doc.Name)
assert.Len(t, doc.Fields, 4, "Should have 4 fields: CreatedAt, UpdatedAt (from *Metadata), Title, and Content")
// Check that embedded fields are promoted
fieldNames := make(map[string]bool)
for _, field := range doc.Fields {
fieldNames[field.Name] = true
}
assert.True(t, fieldNames["CreatedAt"], "Should have promoted CreatedAt field from *Metadata")
assert.True(t, fieldNames["UpdatedAt"], "Should have promoted UpdatedAt field from *Metadata")
assert.True(t, fieldNames["Title"], "Should have Title field")
assert.True(t, fieldNames["Content"], "Should have Content field")
}
func TestParseFileWithGenericStruct(t *testing.T) {
// Create a temporary test file
tmpDir := t.TempDir()
testFile := filepath.Join(tmpDir, "test.go")
testCode := `package testpkg
// fp-go:Lens
type Container[T any] struct {
Value T
Count int
}
`
err := os.WriteFile(testFile, []byte(testCode), 0644)
require.NoError(t, err)
// Parse the file
structs, pkg, err := parseFile(testFile)
require.NoError(t, err)
// Verify results
assert.Equal(t, "testpkg", pkg)
assert.Len(t, structs, 1)
// Check Container struct
container := structs[0]
assert.Equal(t, "Container", container.Name)
assert.Equal(t, "[T any]", container.TypeParams, "Should have type parameter [T any]")
assert.Len(t, container.Fields, 2)
assert.Equal(t, "Value", container.Fields[0].Name)
assert.Equal(t, "T", container.Fields[0].TypeName)
assert.Equal(t, "Count", container.Fields[1].Name)
assert.Equal(t, "int", container.Fields[1].TypeName)
}
func TestParseFileWithMultipleTypeParams(t *testing.T) {
// Create a temporary test file
tmpDir := t.TempDir()
testFile := filepath.Join(tmpDir, "test.go")
testCode := `package testpkg
// fp-go:Lens
type Pair[K comparable, V any] struct {
Key K
Value V
}
`
err := os.WriteFile(testFile, []byte(testCode), 0644)
require.NoError(t, err)
// Parse the file
structs, pkg, err := parseFile(testFile)
require.NoError(t, err)
// Verify results
assert.Equal(t, "testpkg", pkg)
assert.Len(t, structs, 1)
// Check Pair struct
pair := structs[0]
assert.Equal(t, "Pair", pair.Name)
assert.Equal(t, "[K comparable, V any]", pair.TypeParams, "Should have type parameters [K comparable, V any]")
assert.Len(t, pair.Fields, 2)
assert.Equal(t, "Key", pair.Fields[0].Name)
assert.Equal(t, "K", pair.Fields[0].TypeName)
assert.Equal(t, "Value", pair.Fields[1].Name)
assert.Equal(t, "V", pair.Fields[1].TypeName)
}
func TestGenerateLensHelpersWithGenericStruct(t *testing.T) {
// Create a temporary directory with test files
tmpDir := t.TempDir()
testCode := `package testpkg
// fp-go:Lens
type Box[T any] struct {
Content T
Label string
}
`
testFile := filepath.Join(tmpDir, "test.go")
err := os.WriteFile(testFile, []byte(testCode), 0644)
require.NoError(t, err)
// Generate lens code
outputFile := "gen.go"
err = generateLensHelpers(tmpDir, outputFile, false)
require.NoError(t, err)
// Verify the generated file exists
genPath := filepath.Join(tmpDir, outputFile)
_, err = os.Stat(genPath)
require.NoError(t, err)
// Read and verify the generated content
content, err := os.ReadFile(genPath)
require.NoError(t, err)
contentStr := string(content)
// Check for expected content with type parameters
assert.Contains(t, contentStr, "package testpkg")
assert.Contains(t, contentStr, "type BoxLenses[T any] struct", "Should have generic BoxLenses type")
assert.Contains(t, contentStr, "type BoxRefLenses[T any] struct", "Should have generic BoxRefLenses type")
assert.Contains(t, contentStr, "func MakeBoxLenses[T any]() BoxLenses[T]", "Should have generic constructor")
assert.Contains(t, contentStr, "func MakeBoxRefLenses[T any]() BoxRefLenses[T]", "Should have generic ref constructor")
// Check that fields use the generic type parameter
assert.Contains(t, contentStr, "Content L.Lens[Box[T], T]", "Should have lens for generic Content field")
assert.Contains(t, contentStr, "Label L.Lens[Box[T], string]", "Should have lens for Label field")
// Check optional lenses - only for comparable types
// T any is not comparable, so ContentO should NOT be generated
assert.NotContains(t, contentStr, "ContentO LO.LensO[Box[T], T]", "T any is not comparable, should not have optional lens")
// string is comparable, so LabelO should be generated
assert.Contains(t, contentStr, "LabelO LO.LensO[Box[T], string]", "string is comparable, should have optional lens")
}
func TestGenerateLensHelpersWithComparableTypeParam(t *testing.T) {
// Create a temporary directory with test files
tmpDir := t.TempDir()
testCode := `package testpkg
// fp-go:Lens
type ComparableBox[T comparable] struct {
Key T
Value string
}
`
testFile := filepath.Join(tmpDir, "test.go")
err := os.WriteFile(testFile, []byte(testCode), 0644)
require.NoError(t, err)
// Generate lens code
outputFile := "gen.go"
err = generateLensHelpers(tmpDir, outputFile, false)
require.NoError(t, err)
// Verify the generated file exists
genPath := filepath.Join(tmpDir, outputFile)
_, err = os.Stat(genPath)
require.NoError(t, err)
// Read and verify the generated content
content, err := os.ReadFile(genPath)
require.NoError(t, err)
contentStr := string(content)
// Check for expected content with type parameters
assert.Contains(t, contentStr, "package testpkg")
assert.Contains(t, contentStr, "type ComparableBoxLenses[T comparable] struct", "Should have generic ComparableBoxLenses type")
assert.Contains(t, contentStr, "type ComparableBoxRefLenses[T comparable] struct", "Should have generic ComparableBoxRefLenses type")
// Check that Key field (with comparable constraint) uses MakeLensStrict in RefLenses
assert.Contains(t, contentStr, "lensKey := L.MakeLensStrict(", "Key field with comparable constraint should use MakeLensStrict")
// Check that Value field (string, always comparable) also uses MakeLensStrict
assert.Contains(t, contentStr, "lensValue := L.MakeLensStrict(", "Value field (string) should use MakeLensStrict")
// Verify that MakeLensRef is NOT used (since both fields are comparable)
assert.NotContains(t, contentStr, "L.MakeLensRef(", "Should not use MakeLensRef when all fields are comparable")
}
@@ -53,12 +53,12 @@ import (
RIOE "github.com/IBM/fp-go/v2/context/readerioresult"
RIOEH "github.com/IBM/fp-go/v2/context/readerioresult/http"
E "github.com/IBM/fp-go/v2/either"
F "github.com/IBM/fp-go/v2/function"
R "github.com/IBM/fp-go/v2/http/builder"
H "github.com/IBM/fp-go/v2/http/headers"
LZ "github.com/IBM/fp-go/v2/lazy"
O "github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/result"
)
// Requester converts an http/builder.Builder into a ReaderIOResult that produces HTTP requests.
@@ -143,10 +143,10 @@ func Requester(builder *R.Builder) RIOEH.Requester {
return F.Pipe5(
builder.GetBody(),
O.Fold(LZ.Of(E.Of[error](withoutBody)), E.Map[error](withBody)),
E.Ap[func(string) RIOE.ReaderIOResult[*http.Request]](builder.GetTargetURL()),
E.Flap[error, RIOE.ReaderIOResult[*http.Request]](builder.GetMethod()),
E.GetOrElse(RIOE.Left[*http.Request]),
O.Fold(LZ.Of(result.Of(withoutBody)), result.Map(withBody)),
result.Ap[RIOE.Kleisli[string, *http.Request]](builder.GetTargetURL()),
result.Flap[RIOE.ReaderIOResult[*http.Request]](builder.GetMethod()),
result.GetOrElse(RIOE.Left[*http.Request]),
RIOE.Map(func(req *http.Request) *http.Request {
req.Header = H.Monoid.Concat(req.Header, builder.GetHeaders())
return req
@@ -836,3 +836,8 @@ func ChainReaderOptionK[A, B any](onNone func() error) func(readeroption.Kleisli
func ChainFirstReaderOptionK[A, B any](onNone func() error) func(readeroption.Kleisli[context.Context, A, B]) Operator[A, A] {
return RIOR.ChainFirstReaderOptionK[context.Context, A, B](onNone)
}
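// Read runs a ReaderIOResult with the supplied context, returning the
// underlying IOResult. The identifiers in the sketch below are illustrative only:
//
//	ior := Read[int](ctx)(rio) // ctx: context.Context, rio: ReaderIOResult[int]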
//go:inline
func Read[A any](r context.Context) func(ReaderIOResult[A]) IOResult[A] {
return RIOR.Read[A](r)
}
@@ -883,5 +883,3 @@ func BenchmarkExecuteApPar_CanceledContext(b *testing.B) {
benchResult = rioe(ctx)()
}
}
// Made with Bob
@@ -875,5 +875,3 @@ func TestBracket(t *testing.T) {
assert.Equal(t, E.Left[int](err), res)
})
}
// Made with Bob
@@ -92,3 +92,8 @@ func MonadFlap[B, A any](fab ReaderResult[func(A) B], a A) ReaderResult[B] {
func Flap[B, A any](a A) Operator[func(A) B, B] {
return readereither.Flap[context.Context, error, B](a)
}
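// Read runs a ReaderResult with the supplied context, returning the
// underlying Result. The identifiers in the sketch below are illustrative only:
//
//	res := Read[int](ctx)(rr) // ctx: context.Context, rr: ReaderResult[int]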
//go:inline
func Read[A any](r context.Context) func(ReaderResult[A]) Result[A] {
return readereither.Read[error, A](r)
}
@@ -23,11 +23,13 @@ import (
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/reader"
"github.com/IBM/fp-go/v2/readereither"
"github.com/IBM/fp-go/v2/result"
)
type (
Option[A any] = option.Option[A]
Either[A any] = either.Either[error, A]
Result[A any] = result.Result[A]
// ReaderResult is a specialization of the Reader monad for the typical golang scenario
ReaderResult[A any] = readereither.ReaderEither[context.Context, error, A]
@@ -648,5 +648,3 @@ func BenchmarkString_Left(b *testing.B) {
benchString = left.String()
}
}
// Made with Bob
@@ -41,7 +41,7 @@ import (
// curriedAdd := endomorphism.Curry2(add)
// addFive := curriedAdd(5) // Returns an endomorphism that adds 5
// result := addFive(10) // Returns: 15
func Curry2[FCT ~func(T0, T1) T1, T0, T1 any](f FCT) Kleisli[T0, T1] {
func Curry2[FCT ~func(T0, T1) T1, T0, T1 any](f FCT) func(T0) Endomorphism[T1] {
return function.Curry2(f)
}
@@ -68,6 +68,6 @@ func Curry2[FCT ~func(T0, T1) T1, T0, T1 any](f FCT) Kleisli[T0, T1] {
// curriedCombine := endomorphism.Curry3(combine)
// addTen := curriedCombine(5)(5) // Returns an endomorphism that adds 10
// result := addTen(20) // Returns: 30
func Curry3[FCT ~func(T0, T1, T2) T2, T0, T1, T2 any](f FCT) func(T0) Kleisli[T1, T2] {
func Curry3[FCT ~func(T0, T1, T2) T2, T0, T1, T2 any](f FCT) func(T0) func(T1) Endomorphism[T2] {
return function.Curry3(f)
}
@@ -39,13 +39,18 @@
// double := func(x int) int { return x * 2 }
// increment := func(x int) int { return x + 1 }
//
// // Compose them
// doubleAndIncrement := endomorphism.Compose(double, increment)
// result := doubleAndIncrement(5) // (5 * 2) + 1 = 11
// // Compose them (RIGHT-TO-LEFT execution)
//	composed := endomorphism.MonadCompose(double, increment)
// result := composed(5) // increment(5) then double: (5 + 1) * 2 = 12
//
// // Chain them (LEFT-TO-RIGHT execution)
// chained := endomorphism.MonadChain(double, increment)
// result2 := chained(5) // double(5) then increment: (5 * 2) + 1 = 11
//
// # Monoid Operations
//
// Endomorphisms form a monoid, which means you can combine multiple endomorphisms:
// Endomorphisms form a monoid, which means you can combine multiple endomorphisms.
// The monoid uses Compose, which executes RIGHT-TO-LEFT:
//
// import (
// "github.com/IBM/fp-go/v2/endomorphism"
@@ -55,22 +60,39 @@
// // Get the monoid for int endomorphisms
// monoid := endomorphism.Monoid[int]()
//
// // Combine multiple endomorphisms
// // Combine multiple endomorphisms (RIGHT-TO-LEFT execution)
// combined := M.ConcatAll(monoid)(
// func(x int) int { return x * 2 },
// func(x int) int { return x + 1 },
// func(x int) int { return x * 3 },
// func(x int) int { return x * 2 }, // applied third
// func(x int) int { return x + 1 }, // applied second
// func(x int) int { return x * 3 }, // applied first
// )
// result := combined(5) // ((5 * 2) + 1) * 3 = 33
// result := combined(5) // (5 * 3) = 15, (15 + 1) = 16, (16 * 2) = 32
//
// # Monad Operations
//
// The package also provides monadic operations for endomorphisms:
// The package also provides monadic operations for endomorphisms.
// MonadChain executes LEFT-TO-RIGHT, unlike Compose:
//
// // Chain allows sequencing of endomorphisms
// // Chain allows sequencing of endomorphisms (LEFT-TO-RIGHT)
// f := func(x int) int { return x * 2 }
// g := func(x int) int { return x + 1 }
// chained := endomorphism.MonadChain(f, g)
// chained := endomorphism.MonadChain(f, g) // f first, then g
// result := chained(5) // (5 * 2) + 1 = 11
//
// # Compose vs Chain
//
// The key difference between Compose and Chain/MonadChain is execution order:
//
// double := func(x int) int { return x * 2 }
// increment := func(x int) int { return x + 1 }
//
// // Compose: RIGHT-TO-LEFT (mathematical composition)
//	composed := endomorphism.MonadCompose(double, increment)
// result1 := composed(5) // increment(5) * 2 = (5 + 1) * 2 = 12
//
// // MonadChain: LEFT-TO-RIGHT (sequential application)
// chained := endomorphism.MonadChain(double, increment)
// result2 := chained(5) // double(5) + 1 = (5 * 2) + 1 = 11
//
// # Type Safety
//
@@ -17,115 +17,372 @@ package endomorphism
import (
"github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/identity"
)
// MonadAp applies an endomorphism to a value in a monadic context.
// MonadAp applies an endomorphism in a function to an endomorphism value.
//
// This function applies the endomorphism fab to the value fa, returning the result.
// It's the monadic application operation for endomorphisms.
// For endomorphisms, Ap composes two endomorphisms using RIGHT-TO-LEFT composition.
// This is the applicative functor operation for endomorphisms.
//
// IMPORTANT: Execution order is RIGHT-TO-LEFT (same as MonadCompose):
// - fa is applied first to the input
// - fab is applied to the result
//
// Parameters:
// - fab: An endomorphism to apply
// - fa: The value to apply the endomorphism to
// - fab: An endomorphism to apply (outer function)
// - fa: An endomorphism to apply first (inner function)
//
// Returns:
// - The result of applying fab to fa
//
// Example:
//
// double := func(x int) int { return x * 2 }
// result := endomorphism.MonadAp(double, 5) // Returns: 10
func MonadAp[A any](fab Endomorphism[A], fa A) A {
return identity.MonadAp(fab, fa)
}
// Ap returns a function that applies a value to an endomorphism.
//
// This is the curried version of MonadAp. It takes a value and returns a function
// that applies that value to any endomorphism.
//
// Parameters:
// - fa: The value to be applied
//
// Returns:
// - A function that takes an endomorphism and applies fa to it
//
// Example:
//
// applyFive := endomorphism.Ap(5)
// double := func(x int) int { return x * 2 }
// result := applyFive(double) // Returns: 10
func Ap[A any](fa A) func(Endomorphism[A]) A {
return identity.Ap[A](fa)
}
// Compose composes two endomorphisms into a single endomorphism.
//
// Given two endomorphisms f1 and f2, Compose returns a new endomorphism that
// applies f1 first, then applies f2 to the result. This is function composition:
// Compose(f1, f2)(x) = f2(f1(x))
//
// Composition is associative: Compose(Compose(f, g), h) = Compose(f, Compose(g, h))
//
// Parameters:
// - f1: The first endomorphism to apply
// - f2: The second endomorphism to apply
//
// Returns:
// - A new endomorphism that is the composition of f1 and f2
// - A new endomorphism that applies fa, then fab
//
// Example:
//
// double := func(x int) int { return x * 2 }
// increment := func(x int) int { return x + 1 }
// doubleAndIncrement := endomorphism.Compose(double, increment)
// result := doubleAndIncrement(5) // (5 * 2) + 1 = 11
func Compose[A any](f1, f2 Endomorphism[A]) Endomorphism[A] {
return function.Flow2(f1, f2)
//	result := endomorphism.MonadAp(double, increment) // Composes: double ∘ increment
// // result(5) = double(increment(5)) = double(6) = 12
func MonadAp[A any](fab Endomorphism[A], fa Endomorphism[A]) Endomorphism[A] {
return MonadCompose(fab, fa)
}
// MonadChain chains two endomorphisms together.
// Ap returns a function that applies an endomorphism to another endomorphism.
//
// This is the monadic bind operation for endomorphisms. It composes two endomorphisms
// ma and f, returning a new endomorphism that applies ma first, then f.
// MonadChain is equivalent to Compose.
// This is the curried version of MonadAp. It takes an endomorphism fa and returns
// a function that composes any endomorphism with fa using RIGHT-TO-LEFT composition.
//
// IMPORTANT: Execution order is RIGHT-TO-LEFT:
// - fa is applied first to the input
// - The endomorphism passed to the returned function is applied to the result
//
// Parameters:
// - ma: The first endomorphism in the chain
// - f: The second endomorphism in the chain
// - fa: The first endomorphism to apply (inner function)
//
// Returns:
// - A new endomorphism that chains ma and f
// - A function that takes an endomorphism and composes it with fa (right-to-left)
//
// Example:
//
// increment := func(x int) int { return x + 1 }
// applyIncrement := endomorphism.Ap(increment)
// double := func(x int) int { return x * 2 }
// composed := applyIncrement(double) // double ∘ increment
// // composed(5) = double(increment(5)) = double(6) = 12
func Ap[A any](fa Endomorphism[A]) Operator[A] {
return Compose(fa)
}
// MonadCompose composes two endomorphisms, executing them from right to left.
//
// MonadCompose creates a new endomorphism that applies g first, then f.
// This follows the mathematical notation of function composition: (f ∘ g)(x) = f(g(x))
//
// IMPORTANT: The execution order is RIGHT-TO-LEFT:
//   - g is applied first to the input
//   - f is applied to the result of g
//
// This is different from Chain/MonadChain which executes LEFT-TO-RIGHT.
//
// Parameters:
//   - f: The second function to apply (outer function)
//   - g: The first function to apply (inner function)
//
// Returns:
//   - A new endomorphism that applies g, then f
//
// Example:
//
// double := func(x int) int { return x * 2 }
// increment := func(x int) int { return x + 1 }
//
// // MonadCompose executes RIGHT-TO-LEFT: increment first, then double
// composed := endomorphism.MonadCompose(double, increment)
// result := composed(5) // (5 + 1) * 2 = 12
//
// // Compare with Chain which executes LEFT-TO-RIGHT:
// chained := endomorphism.MonadChain(double, increment)
// result2 := chained(5) // (5 * 2) + 1 = 11
func MonadCompose[A any](f, g Endomorphism[A]) Endomorphism[A] {
return function.Flow2(g, f)
}
// MonadMap maps an endomorphism over another endomorphism using function composition.
//
// For endomorphisms, Map is equivalent to Compose (RIGHT-TO-LEFT composition).
// This is the functor map operation for endomorphisms.
//
// IMPORTANT: Execution order is RIGHT-TO-LEFT:
// - g is applied first to the input
// - f is applied to the result
//
// Parameters:
// - f: The function to map (outer function)
// - g: The endomorphism to map over (inner function)
//
// Returns:
// - A new endomorphism that applies g, then f
//
// Example:
//
// double := func(x int) int { return x * 2 }
// increment := func(x int) int { return x + 1 }
// mapped := endomorphism.MonadMap(double, increment)
// // mapped(5) = double(increment(5)) = double(6) = 12
func MonadMap[A any](f, g Endomorphism[A]) Endomorphism[A] {
return MonadCompose(f, g)
}
// Compose returns a function that composes an endomorphism with another, executing right to left.
//
// This is the curried version of MonadCompose. It takes an endomorphism g and returns
// a function that composes any endomorphism with g, applying g first (inner function),
// then the input endomorphism (outer function).
//
// IMPORTANT: Execution order is RIGHT-TO-LEFT (mathematical composition):
// - g is applied first to the input
// - The endomorphism passed to the returned function is applied to the result of g
//
// This follows the mathematical composition notation where Compose(g)(f) = f ∘ g
//
// Parameters:
// - g: The first endomorphism to apply (inner function)
//
// Returns:
// - A function that takes an endomorphism f and composes it with g (right-to-left)
//
// Example:
//
// increment := func(x int) int { return x + 1 }
// composeWithIncrement := endomorphism.Compose(increment)
// double := func(x int) int { return x * 2 }
//
// // Composes double with increment (RIGHT-TO-LEFT: increment first, then double)
// composed := composeWithIncrement(double)
// result := composed(5) // (5 + 1) * 2 = 12
//
// // Compare with Chain which executes LEFT-TO-RIGHT:
// chainWithIncrement := endomorphism.Chain(increment)
// chained := chainWithIncrement(double)
// result2 := chained(5) // (5 * 2) + 1 = 11
func Compose[A any](g Endomorphism[A]) Operator[A] {
return function.Bind2nd(MonadCompose, g)
}
// Map returns a function that maps an endomorphism over another endomorphism.
//
// This is the curried version of MonadMap. It takes an endomorphism f and returns
// a function that maps f over any endomorphism using RIGHT-TO-LEFT composition.
//
// IMPORTANT: Execution order is RIGHT-TO-LEFT (same as Compose):
// - The endomorphism passed to the returned function is applied first
// - f is applied to the result
//
// For endomorphisms, Map is equivalent to Compose.
//
// Parameters:
// - f: The function to map (outer function)
//
// Returns:
// - A function that takes an endomorphism and maps f over it (right-to-left)
//
// Example:
//
// double := func(x int) int { return x * 2 }
// mapDouble := endomorphism.Map(double)
// increment := func(x int) int { return x + 1 }
// mapped := mapDouble(increment)
// // mapped(5) = double(increment(5)) = double(6) = 12
func Map[A any](f Endomorphism[A]) Operator[A] {
return Compose(f)
}
// MonadChain chains two endomorphisms together, executing them from left to right.
//
// This is the monadic bind operation for endomorphisms. For endomorphisms, bind is
// simply left-to-right function composition: ma is applied first, then f.
//
// IMPORTANT: The execution order is LEFT-TO-RIGHT:
// - ma is applied first to the input
// - f is applied to the result of ma
//
// This is different from MonadCompose which executes RIGHT-TO-LEFT.
//
// Parameters:
// - ma: The first endomorphism to apply
// - f: The second endomorphism to apply
//
// Returns:
// - A new endomorphism that applies ma, then f
//
// Example:
//
// double := func(x int) int { return x * 2 }
// increment := func(x int) int { return x + 1 }
//
// // MonadChain executes LEFT-TO-RIGHT: double first, then increment
// chained := endomorphism.MonadChain(double, increment)
// result := chained(5) // (5 * 2) + 1 = 11
//
// // Compare with MonadCompose which executes RIGHT-TO-LEFT:
// composed := endomorphism.MonadCompose(increment, double)
// result2 := composed(5) // (5 * 2) + 1 = 11 (same result, different parameter order)
func MonadChain[A any](ma Endomorphism[A], f Endomorphism[A]) Endomorphism[A] {
return Compose(ma, f)
return function.Flow2(ma, f)
}
// Chain returns a function that chains an endomorphism with another.
// MonadChainFirst chains two endomorphisms but returns the result of the first.
//
// This is the curried version of MonadChain. It takes an endomorphism f and returns
// a function that chains any endomorphism with f.
// This applies ma first, then f, but discards the result of f and returns the result of ma.
// Useful for performing side-effects while preserving the original value.
//
// Parameters:
// - f: The endomorphism to chain with
// - ma: The endomorphism whose result to keep
// - f: The endomorphism to apply for its effect
//
// Returns:
// - A function that takes an endomorphism and chains it with f
// - A new endomorphism that applies both but returns ma's result
//
// Example:
//
// double := func(x int) int { return x * 2 }
// log := func(x int) int { fmt.Println(x); return x }
// chained := endomorphism.MonadChainFirst(double, log)
// result := chained(5) // Prints 10, returns 10
func MonadChainFirst[A any](ma Endomorphism[A], f Endomorphism[A]) Endomorphism[A] {
return func(a A) A {
result := ma(a)
f(result) // Apply f for its effect
return result // But return ma's result
}
}
// ChainFirst returns a function that chains for effect but preserves the original result.
//
// This is the curried version of MonadChainFirst.
//
// Parameters:
// - f: The endomorphism to apply for its effect
//
// Returns:
// - A function that takes an endomorphism and chains it with f, keeping the first result
//
// Example:
//
// log := func(x int) int { fmt.Println(x); return x }
// chainLog := endomorphism.ChainFirst(log)
// double := func(x int) int { return x * 2 }
// chained := chainLog(double)
// result := chained(5) // Prints 10, returns 10
func ChainFirst[A any](f Endomorphism[A]) Operator[A] {
return function.Bind2nd(MonadChainFirst, f)
}
// Chain returns a function that chains an endomorphism with another, executing left to right.
//
// This is the curried version of MonadChain. It takes an endomorphism f and returns
// a function that chains any endomorphism with f, applying the input endomorphism first,
// then f.
//
// IMPORTANT: Execution order is LEFT-TO-RIGHT:
// - The endomorphism passed to the returned function is applied first
// - f is applied to the result
//
// Parameters:
// - f: The second endomorphism to apply
//
// Returns:
// - A function that takes an endomorphism and chains it with f (left-to-right)
//
// Example:
//
// increment := func(x int) int { return x + 1 }
// chainWithIncrement := endomorphism.Chain(increment)
// double := func(x int) int { return x * 2 }
//
// // Chains double (first) with increment (second)
// chained := chainWithIncrement(double)
// result := chained(5) // (5 * 2) + 1 = 11
func Chain[A any](f Endomorphism[A]) Endomorphism[Endomorphism[A]] {
func Chain[A any](f Endomorphism[A]) Operator[A] {
return function.Bind2nd(MonadChain, f)
}
// Flatten collapses a nested endomorphism into a single endomorphism.
//
// Given an endomorphism that transforms endomorphisms (Endomorphism[Endomorphism[A]]),
// Flatten produces a simple endomorphism by applying the outer transformation to the
// identity function. This is the monadic join operation for the Endomorphism monad.
//
// The function applies the nested endomorphism to Identity[A] to extract the inner
// endomorphism, effectively "flattening" the two layers into one.
//
// Type Parameters:
// - A: The type being transformed by the endomorphisms
//
// Parameters:
// - mma: A nested endomorphism that transforms endomorphisms
//
// Returns:
// - An endomorphism that applies the transformation directly to values of type A
//
// Example:
//
// type Counter struct {
// Value int
// }
//
// // An endomorphism that wraps another endomorphism
// addThenDouble := func(endo Endomorphism[Counter]) Endomorphism[Counter] {
// return func(c Counter) Counter {
// c = endo(c) // Apply the input endomorphism
// c.Value = c.Value * 2 // Then double
// return c
// }
// }
//
// flattened := Flatten(addThenDouble)
// result := flattened(Counter{Value: 5}) // Counter{Value: 10}
func Flatten[A any](mma Endomorphism[Endomorphism[A]]) Endomorphism[A] {
return mma(function.Identity[A])
}
// Join performs self-application of a function that produces endomorphisms.
//
// Given a function that takes a value and returns an endomorphism of that same type,
// Join creates an endomorphism that applies the value to itself through the function.
// This operation is also known as the W combinator (warbler) in combinatory logic,
// or diagonal application.
//
// The resulting endomorphism evaluates f(a)(a), applying the same value a to both
// the function f and the resulting endomorphism.
//
// Type Parameters:
// - A: The type being transformed
//
// Parameters:
// - f: A function that takes a value and returns an endomorphism of that type
//
// Returns:
// - An endomorphism that performs self-application: f(a)(a)
//
// Example:
//
// type Point struct {
// X, Y int
// }
//
// // Create an endomorphism based on the input point
// scaleBy := func(p Point) Endomorphism[Point] {
// return func(p2 Point) Point {
// return Point{
// X: p2.X * p.X,
// Y: p2.Y * p.Y,
// }
// }
// }
//
// selfScale := Join(scaleBy)
// result := selfScale(Point{X: 3, Y: 4}) // Point{X: 9, Y: 16}
func Join[A any](f Kleisli[A]) Endomorphism[A] {
return func(a A) A {
return f(a)(a)
}
}
@@ -76,84 +76,152 @@ func TestCurry3(t *testing.T) {
// TestMonadAp tests the MonadAp function
func TestMonadAp(t *testing.T) {
result := MonadAp(double, 5)
assert.Equal(t, 10, result, "MonadAp should apply endomorphism to value")
// MonadAp composes two endomorphisms (RIGHT-TO-LEFT)
// MonadAp(double, increment) means: increment first, then double
composed := MonadAp(double, increment)
result := composed(5)
assert.Equal(t, 12, result, "MonadAp should compose right-to-left: (5 + 1) * 2 = 12")
result2 := MonadAp(increment, 10)
assert.Equal(t, 11, result2, "MonadAp should work with different endomorphisms")
// Test with different order
composed2 := MonadAp(increment, double)
result2 := composed2(5)
assert.Equal(t, 11, result2, "MonadAp should compose right-to-left: (5 * 2) + 1 = 11")
result3 := MonadAp(square, 4)
assert.Equal(t, 16, result3, "MonadAp should work with square function")
// Test with square
composed3 := MonadAp(square, increment)
result3 := composed3(5)
assert.Equal(t, 36, result3, "MonadAp should compose right-to-left: (5 + 1) ^ 2 = 36")
}
// TestAp tests the Ap function
func TestAp(t *testing.T) {
applyFive := Ap(5)
// Ap is the curried version of MonadAp
// Ap(increment) returns a function that composes with increment (RIGHT-TO-LEFT)
applyIncrement := Ap(increment)
result := applyFive(double)
assert.Equal(t, 10, result, "Ap should apply value to endomorphism")
composed := applyIncrement(double)
result := composed(5)
assert.Equal(t, 12, result, "Ap should compose right-to-left: (5 + 1) * 2 = 12")
result2 := applyFive(increment)
assert.Equal(t, 6, result2, "Ap should work with different endomorphisms")
// Test with different endomorphism
composed2 := applyIncrement(square)
result2 := composed2(5)
assert.Equal(t, 36, result2, "Ap should compose right-to-left: (5 + 1) ^ 2 = 36")
applyTen := Ap(10)
result3 := applyTen(square)
assert.Equal(t, 100, result3, "Ap should work with different values")
// Test with different base endomorphism
applyDouble := Ap(double)
composed3 := applyDouble(increment)
result3 := composed3(5)
assert.Equal(t, 11, result3, "Ap should compose right-to-left: (5 * 2) + 1 = 11")
}
// TestCompose tests the Compose function
func TestCompose(t *testing.T) {
// Test basic composition: (5 * 2) + 1 = 11
doubleAndIncrement := Compose(double, increment)
result := doubleAndIncrement(5)
assert.Equal(t, 11, result, "Compose should compose endomorphisms correctly")
// TestMonadCompose tests the MonadCompose function
func TestMonadCompose(t *testing.T) {
// Test basic composition: RIGHT-TO-LEFT execution
// MonadCompose(double, increment) means: increment first, then double
composed := MonadCompose(double, increment)
result := composed(5)
assert.Equal(t, 12, result, "MonadCompose should execute right-to-left: (5 + 1) * 2 = 12")
// Test composition order: (5 + 1) * 2 = 12
incrementAndDouble := Compose(increment, double)
result2 := incrementAndDouble(5)
assert.Equal(t, 12, result2, "Compose should respect order of composition")
// Test composition order: RIGHT-TO-LEFT execution
// MonadCompose(increment, double) means: double first, then increment
composed2 := MonadCompose(increment, double)
result2 := composed2(5)
assert.Equal(t, 11, result2, "MonadCompose should execute right-to-left: (5 * 2) + 1 = 11")
// Test with three compositions: ((5 * 2) + 1) * ((5 * 2) + 1) = 121
complex := Compose(Compose(double, increment), square)
// Test with three compositions: RIGHT-TO-LEFT execution
// MonadCompose(MonadCompose(double, increment), square) means: square, then increment, then double
complex := MonadCompose(MonadCompose(double, increment), square)
result3 := complex(5)
assert.Equal(t, 121, result3, "Compose should work with nested compositions")
// 5 -> square -> 25 -> increment -> 26 -> double -> 52
assert.Equal(t, 52, result3, "MonadCompose should work with nested compositions: square(5)=25, +1=26, *2=52")
}
// TestMonadChain tests the MonadChain function
func TestMonadChain(t *testing.T) {
// MonadChain should behave like Compose
// MonadChain executes LEFT-TO-RIGHT (first arg first, second arg second)
chained := MonadChain(double, increment)
result := chained(5)
assert.Equal(t, 11, result, "MonadChain should chain endomorphisms correctly")
assert.Equal(t, 11, result, "MonadChain should execute left-to-right: (5 * 2) + 1 = 11")
chained2 := MonadChain(increment, double)
result2 := chained2(5)
assert.Equal(t, 12, result2, "MonadChain should respect order")
assert.Equal(t, 12, result2, "MonadChain should execute left-to-right: (5 + 1) * 2 = 12")
// Test with negative values
chained3 := MonadChain(negate, increment)
result3 := chained3(5)
assert.Equal(t, -4, result3, "MonadChain should work with negative values")
assert.Equal(t, -4, result3, "MonadChain should execute left-to-right: -(5) + 1 = -4")
}
// TestChain tests the Chain function
func TestChain(t *testing.T) {
// Chain(f) returns a function that applies its argument first, then f
chainWithIncrement := Chain(increment)
// chainWithIncrement(double) means: double first, then increment
chained := chainWithIncrement(double)
result := chained(5)
assert.Equal(t, 11, result, "Chain should create chaining function correctly")
assert.Equal(t, 11, result, "Chain should execute left-to-right: (5 * 2) + 1 = 11")
chainWithDouble := Chain(double)
// chainWithDouble(increment) means: increment first, then double
chained2 := chainWithDouble(increment)
result2 := chained2(5)
assert.Equal(t, 12, result2, "Chain should work with different endomorphisms")
assert.Equal(t, 12, result2, "Chain should execute left-to-right: (5 + 1) * 2 = 12")
// Test chaining with square
chainWithSquare := Chain(square)
// chainWithSquare(double) means: double first, then square
chained3 := chainWithSquare(double)
result3 := chained3(3)
assert.Equal(t, 36, result3, "Chain should work with square function")
assert.Equal(t, 36, result3, "Chain should execute left-to-right: (3 * 2) ^ 2 = 36")
}
// TestCompose tests the curried Compose function
func TestCompose(t *testing.T) {
// Compose(g) returns a function that applies g first, then its argument
composeWithIncrement := Compose(increment)
// composeWithIncrement(double) means: increment first, then double
composed := composeWithIncrement(double)
result := composed(5)
assert.Equal(t, 12, result, "Compose should execute right-to-left: (5 + 1) * 2 = 12")
composeWithDouble := Compose(double)
// composeWithDouble(increment) means: double first, then increment
composed2 := composeWithDouble(increment)
result2 := composed2(5)
assert.Equal(t, 11, result2, "Compose should execute right-to-left: (5 * 2) + 1 = 11")
// Test composing with square
composeWithSquare := Compose(square)
// composeWithSquare(double) means: square first, then double
composed3 := composeWithSquare(double)
result3 := composed3(3)
assert.Equal(t, 18, result3, "Compose should execute right-to-left: (3 ^ 2) * 2 = 18")
}
// TestMonadComposeVsCompose demonstrates the relationship between MonadCompose and Compose
func TestMonadComposeVsCompose(t *testing.T) {
double := func(x int) int { return x * 2 }
increment := func(x int) int { return x + 1 }
// MonadCompose takes both functions at once
monadComposed := MonadCompose(double, increment)
result1 := monadComposed(5) // (5 + 1) * 2 = 12
// Compose is the curried version - takes one function, returns a function
curriedCompose := Compose(increment)
composed := curriedCompose(double)
result2 := composed(5) // (5 + 1) * 2 = 12
assert.Equal(t, result1, result2, "MonadCompose and Compose should produce the same result")
assert.Equal(t, 12, result1, "Both should execute right-to-left: (5 + 1) * 2 = 12")
// Demonstrate that Compose(g)(f) is equivalent to MonadCompose(f, g)
assert.Equal(t, MonadCompose(double, increment)(5), Compose(increment)(double)(5),
"Compose(g)(f) should equal MonadCompose(f, g)")
}
// TestOf tests the Of function
@@ -191,12 +259,14 @@ func TestIdentity(t *testing.T) {
assert.Equal(t, 0, id(0), "Identity should work with zero")
assert.Equal(t, -10, id(-10), "Identity should work with negative values")
// Identity should be neutral for composition
composed1 := Compose(id, double)
assert.Equal(t, 10, composed1(5), "Identity should be right neutral for composition")
// Identity should be neutral for composition (RIGHT-TO-LEFT)
// MonadCompose(id, double) means: double first, then id
composed1 := MonadCompose(id, double)
assert.Equal(t, 10, composed1(5), "Identity should be left neutral: double(5) = 10")
composed2 := Compose(double, id)
assert.Equal(t, 10, composed2(5), "Identity should be left neutral for composition")
// MonadCompose(double, id) means: id first, then double
composed2 := MonadCompose(double, id)
assert.Equal(t, 10, composed2(5), "Identity should be right neutral: id(5) then double = 10")
// Test with strings
idStr := Identity[string]()
@@ -207,10 +277,11 @@ func TestIdentity(t *testing.T) {
func TestSemigroup(t *testing.T) {
sg := Semigroup[int]()
// Test basic concat
// Test basic concat (RIGHT-TO-LEFT execution via Compose)
// Concat(double, increment) means: increment first, then double
combined := sg.Concat(double, increment)
result := combined(5)
assert.Equal(t, 11, result, "Semigroup concat should compose endomorphisms")
assert.Equal(t, 12, result, "Semigroup concat should execute right-to-left: (5 + 1) * 2 = 12")
// Test associativity: (f . g) . h = f . (g . h)
f := double
@@ -223,10 +294,12 @@ func TestSemigroup(t *testing.T) {
testValue := 3
assert.Equal(t, left(testValue), right(testValue), "Semigroup should be associative")
// Test with ConcatAll from semigroup package
// Test with ConcatAll from semigroup package (RIGHT-TO-LEFT)
// ConcatAll(double)(increment, square) means: square, then increment, then double
combined2 := S.ConcatAll(sg)(double)([]Endomorphism[int]{increment, square})
result2 := combined2(5)
assert.Equal(t, 121, result2, "Semigroup should work with ConcatAll")
// 5 -> square -> 25 -> increment -> 26 -> double -> 52
assert.Equal(t, 52, result2, "Semigroup ConcatAll should execute right-to-left: square(5)=25, +1=26, *2=52")
}
// TestMonoid tests the Monoid function
@@ -237,19 +310,21 @@ func TestMonoid(t *testing.T) {
empty := monoid.Empty()
assert.Equal(t, 42, empty(42), "Monoid empty should be identity")
// Test right identity: x . empty = x
// Test right identity: x . empty = x (RIGHT-TO-LEFT: empty first, then x)
// Concat(double, empty) means: empty first, then double
rightIdentity := monoid.Concat(double, empty)
assert.Equal(t, 10, rightIdentity(5), "Monoid should satisfy right identity")
assert.Equal(t, 10, rightIdentity(5), "Monoid should satisfy right identity: empty(5) then double = 10")
// Test left identity: empty . x = x
// Test left identity: empty . x = x (RIGHT-TO-LEFT: x first, then empty)
// Concat(empty, double) means: double first, then empty
leftIdentity := monoid.Concat(empty, double)
assert.Equal(t, 10, leftIdentity(5), "Monoid should satisfy left identity")
assert.Equal(t, 10, leftIdentity(5), "Monoid should satisfy left identity: double(5) then empty = 10")
// Test ConcatAll with multiple endomorphisms
// Test ConcatAll with multiple endomorphisms (RIGHT-TO-LEFT execution)
combined := M.ConcatAll(monoid)([]Endomorphism[int]{double, increment, square})
result := combined(5)
// (5 * 2) = 10, (10 + 1) = 11, (11 * 11) = 121
assert.Equal(t, 121, result, "Monoid should work with ConcatAll")
// RIGHT-TO-LEFT: square(5) = 25, increment(25) = 26, double(26) = 52
assert.Equal(t, 52, result, "Monoid ConcatAll should execute right-to-left: square(5)=25, +1=26, *2=52")
// Test ConcatAll with empty list should return identity
emptyResult := M.ConcatAll(monoid)([]Endomorphism[int]{})
@@ -294,19 +369,20 @@ func TestMonoidLaws(t *testing.T) {
// TestEndomorphismWithDifferentTypes tests endomorphisms with different types
func TestEndomorphismWithDifferentTypes(t *testing.T) {
// Test with strings
toUpper := func(s string) string {
// Test with strings (RIGHT-TO-LEFT execution)
addExclamation := func(s string) string {
return s + "!"
}
addPrefix := func(s string) string {
return "Hello, " + s
}
strComposed := Compose(toUpper, addPrefix)
// Compose(addExclamation, addPrefix) means: addPrefix first, then addExclamation
strComposed := MonadCompose(addExclamation, addPrefix)
result := strComposed("World")
assert.Equal(t, "Hello, World!", result, "Endomorphism should work with strings")
assert.Equal(t, "Hello, World!", result, "Compose should execute right-to-left with strings")
// Test with float64
// Test with float64 (RIGHT-TO-LEFT execution)
doubleFloat := func(x float64) float64 {
return x * 2.0
}
@@ -314,64 +390,63 @@ func TestEndomorphismWithDifferentTypes(t *testing.T) {
return x + 1.0
}
floatComposed := Compose(doubleFloat, addOne)
// Compose(doubleFloat, addOne) means: addOne first, then doubleFloat
floatComposed := MonadCompose(doubleFloat, addOne)
resultFloat := floatComposed(5.5)
assert.Equal(t, 12.0, resultFloat, "Endomorphism should work with float64")
// 5.5 + 1.0 = 6.5, 6.5 * 2.0 = 13.0
assert.Equal(t, 13.0, resultFloat, "Compose should execute right-to-left: (5.5 + 1.0) * 2.0 = 13.0")
}
// TestComplexCompositions tests more complex composition scenarios
func TestComplexCompositions(t *testing.T) {
// Create a pipeline of transformations
pipeline := Compose(
Compose(
Compose(double, increment),
// Create a pipeline of transformations (RIGHT-TO-LEFT execution)
// Innermost Compose is evaluated first in the composition chain
pipeline := MonadCompose(
MonadCompose(
MonadCompose(double, increment),
square,
),
negate,
)
// (5 * 2) = 10, (10 + 1) = 11, (11 * 11) = 121, -(121) = -121
// RIGHT-TO-LEFT: negate(5) = -5, square(-5) = 25, increment(25) = 26, double(26) = 52
result := pipeline(5)
assert.Equal(t, -121, result, "Complex composition should work correctly")
assert.Equal(t, 52, result, "Complex composition should execute right-to-left")
// Test using monoid to build the same pipeline
// Test using monoid to build the same pipeline (RIGHT-TO-LEFT)
monoid := Monoid[int]()
pipelineMonoid := M.ConcatAll(monoid)([]Endomorphism[int]{double, increment, square, negate})
resultMonoid := pipelineMonoid(5)
assert.Equal(t, -121, resultMonoid, "Monoid-based pipeline should match composition")
// RIGHT-TO-LEFT: negate(5) = -5, square(-5) = 25, increment(25) = 26, double(26) = 52
assert.Equal(t, 52, resultMonoid, "Monoid-based pipeline should match composition (right-to-left)")
}
// TestOperatorType tests the Operator type
func TestOperatorType(t *testing.T) {
// Create an operator that lifts an int endomorphism to work on the length of strings
lengthOperator := func(f Endomorphism[int]) Endomorphism[string] {
return func(s string) string {
newLen := f(len(s))
if newLen > len(s) {
// Pad with spaces
for i := len(s); i < newLen; i++ {
s += " "
}
} else if newLen < len(s) {
// Truncate
s = s[:newLen]
}
return s
// Create an operator that transforms int endomorphisms
// This operator takes an endomorphism and returns a new one that applies it twice
applyTwice := func(f Endomorphism[int]) Endomorphism[int] {
return func(x int) int {
return f(f(x))
}
}
// Use the operator
var op Operator[int, string] = lengthOperator
doubleLength := op(double)
var op Operator[int] = applyTwice
doubleDouble := op(double)
result := doubleLength("hello") // len("hello") = 5, 5 * 2 = 10
assert.Equal(t, 10, len(result), "Operator should transform endomorphisms correctly")
assert.Equal(t, "hello ", result, "Operator should pad string correctly")
result := doubleDouble(5) // double(double(5)) = double(10) = 20
assert.Equal(t, 20, result, "Operator should transform endomorphisms correctly")
// Test with increment
incrementTwice := op(increment)
result2 := incrementTwice(5) // increment(increment(5)) = increment(6) = 7
assert.Equal(t, 7, result2, "Operator should work with different endomorphisms")
}
// BenchmarkCompose benchmarks the Compose function
func BenchmarkCompose(b *testing.B) {
composed := Compose(double, increment)
composed := MonadCompose(double, increment)
b.ResetTimer()
for i := 0; i < b.N; i++ {
_ = composed(5)
@@ -379,6 +454,47 @@ func BenchmarkCompose(b *testing.B) {
}
// BenchmarkMonoidConcatAll benchmarks ConcatAll with monoid
// TestComposeVsChain demonstrates the key difference between Compose and Chain
func TestComposeVsChain(t *testing.T) {
double := func(x int) int { return x * 2 }
increment := func(x int) int { return x + 1 }
// Compose executes RIGHT-TO-LEFT
// MonadCompose(double, increment) means: increment first, then double
composed := MonadCompose(double, increment)
composedResult := composed(5) // (5 + 1) * 2 = 12
// MonadChain executes LEFT-TO-RIGHT
// MonadChain(double, increment) means: double first, then increment
chained := MonadChain(double, increment)
chainedResult := chained(5) // (5 * 2) + 1 = 11
assert.Equal(t, 12, composedResult, "Compose should execute right-to-left")
assert.Equal(t, 11, chainedResult, "MonadChain should execute left-to-right")
assert.NotEqual(t, composedResult, chainedResult, "Compose and Chain should produce different results with non-commutative operations")
// To get the same result with Compose, we need to reverse the order
composedReversed := MonadCompose(increment, double)
assert.Equal(t, chainedResult, composedReversed(5), "Compose with reversed args should match Chain")
// Demonstrate with a more complex example
square := func(x int) int { return x * x }
// Compose: RIGHT-TO-LEFT
composed3 := MonadCompose(MonadCompose(square, increment), double)
// double(5) = 10, increment(10) = 11, square(11) = 121
result1 := composed3(5)
// MonadChain: LEFT-TO-RIGHT
chained3 := MonadChain(MonadChain(double, increment), square)
// double(5) = 10, increment(10) = 11, square(11) = 121
result2 := chained3(5)
assert.Equal(t, 121, result1, "Compose should execute right-to-left")
assert.Equal(t, 121, result2, "MonadChain should execute left-to-right")
assert.Equal(t, result1, result2, "Both should produce same result when operations are in correct order")
}
func BenchmarkMonoidConcatAll(b *testing.B) {
monoid := Monoid[int]()
combined := M.ConcatAll(monoid)([]Endomorphism[int]{double, increment, square})
@@ -397,3 +513,211 @@ func BenchmarkChain(b *testing.B) {
_ = chained(5)
}
}
// TestFunctorLaws tests that endomorphisms satisfy the functor laws
func TestFunctorLaws(t *testing.T) {
// Functor Law 1: Identity
// map(id) = id
t.Run("Identity", func(t *testing.T) {
id := Identity[int]()
endo := double
// map(id)(endo) should equal endo
mapped := MonadMap(id, endo)
testValue := 5
assert.Equal(t, endo(testValue), mapped(testValue), "map(id) should equal id")
})
// Functor Law 2: Composition
// map(f . g) = map(f) . map(g)
t.Run("Composition", func(t *testing.T) {
f := double
g := increment
endo := square
// Left side: map(f . g)(endo)
composed := MonadCompose(f, g)
left := MonadMap(composed, endo)
// Right side: map(f)(map(g)(endo))
mappedG := MonadMap(g, endo)
right := MonadMap(f, mappedG)
testValue := 3
assert.Equal(t, left(testValue), right(testValue), "map(f . g) should equal map(f) . map(g)")
})
}
// TestApplicativeLaws tests that endomorphisms satisfy the applicative functor laws
func TestApplicativeLaws(t *testing.T) {
// Applicative Law 1: Identity
// ap(id, v) = v
t.Run("Identity", func(t *testing.T) {
id := Identity[int]()
v := double
applied := MonadAp(id, v)
testValue := 5
assert.Equal(t, v(testValue), applied(testValue), "ap(id, v) should equal v")
})
// Applicative Law 2: Composition
// ap(ap(ap(compose, u), v), w) = ap(u, ap(v, w))
t.Run("Composition", func(t *testing.T) {
u := double
v := increment
w := square
// For endomorphisms, ap is just composition
// Left side: ap(ap(ap(compose, u), v), w) = compose(compose(u, v), w)
left := MonadCompose(MonadCompose(u, v), w)
// Right side: ap(u, ap(v, w)) = compose(u, compose(v, w))
right := MonadCompose(u, MonadCompose(v, w))
testValue := 3
assert.Equal(t, left(testValue), right(testValue), "Applicative composition law")
})
// Applicative Law 3: Homomorphism
// ap(pure(f), pure(x)) = pure(f(x))
t.Run("Homomorphism", func(t *testing.T) {
// For endomorphisms, "pure" is just the identity function that returns a constant
// This law is trivially satisfied for endomorphisms
f := double
x := 5
// ap(f, id) applied to x should equal f(x)
id := Identity[int]()
applied := MonadAp(f, id)
assert.Equal(t, f(x), applied(x), "Homomorphism law")
})
}
// TestMonadLaws tests that endomorphisms satisfy the monad laws
func TestMonadLaws(t *testing.T) {
// Monad Law 1: Left Identity
// chain(pure(a), f) = f(a)
t.Run("LeftIdentity", func(t *testing.T) {
// For endomorphisms, "pure" is the identity function
// chain(id, f) = f
id := Identity[int]()
f := double
chained := MonadChain(id, f)
testValue := 5
assert.Equal(t, f(testValue), chained(testValue), "chain(id, f) should equal f")
})
// Monad Law 2: Right Identity
// chain(m, pure) = m
t.Run("RightIdentity", func(t *testing.T) {
m := double
id := Identity[int]()
chained := MonadChain(m, id)
testValue := 5
assert.Equal(t, m(testValue), chained(testValue), "chain(m, id) should equal m")
})
// Monad Law 3: Associativity
// chain(chain(m, f), g) = chain(m, x => chain(f(x), g))
t.Run("Associativity", func(t *testing.T) {
m := square
f := double
g := increment
// Left side: chain(chain(m, f), g)
left := MonadChain(MonadChain(m, f), g)
// Right side: chain(m, chain(f, g))
// For simple endomorphisms (not Kleisli arrows), this simplifies to:
right := MonadChain(m, MonadChain(f, g))
testValue := 3
assert.Equal(t, left(testValue), right(testValue), "Monad associativity law")
})
}
// TestMonadComposeVsMonadChain verifies the relationship between Compose and Chain
func TestMonadComposeVsMonadChain(t *testing.T) {
f := double
g := increment
// MonadCompose(f, g) should equal MonadChain(g, f)
// Because Compose is right-to-left and Chain is left-to-right
composed := MonadCompose(f, g)
chained := MonadChain(g, f)
testValue := 5
assert.Equal(t, composed(testValue), chained(testValue),
"MonadCompose(f, g) should equal MonadChain(g, f)")
}
// TestMapEqualsCompose verifies that Map is equivalent to Compose for endomorphisms
func TestMapEqualsCompose(t *testing.T) {
f := double
g := increment
// MonadMap(f, g) should equal MonadCompose(f, g)
mapped := MonadMap(f, g)
composed := MonadCompose(f, g)
testValue := 5
assert.Equal(t, composed(testValue), mapped(testValue),
"MonadMap should equal MonadCompose for endomorphisms")
// Curried versions
mapF := Map(f)
composeF := Compose(f)
mappedG := mapF(g)
composedG := composeF(g)
assert.Equal(t, composedG(testValue), mappedG(testValue),
"Map should equal Compose for endomorphisms (curried)")
}
// TestApEqualsCompose verifies that Ap is equivalent to Compose for endomorphisms
func TestApEqualsCompose(t *testing.T) {
f := double
g := increment
// MonadAp(f, g) should equal MonadCompose(f, g)
applied := MonadAp(f, g)
composed := MonadCompose(f, g)
testValue := 5
assert.Equal(t, composed(testValue), applied(testValue),
"MonadAp should equal MonadCompose for endomorphisms")
// Curried versions
apG := Ap(g)
composeG := Compose(g)
appliedF := apG(f)
composedF := composeG(f)
assert.Equal(t, composedF(testValue), appliedF(testValue),
"Ap should equal Compose for endomorphisms (curried)")
}
// TestChainFirst tests the ChainFirst operation
func TestChainFirst(t *testing.T) {
double := func(x int) int { return x * 2 }
// Track side effect
var sideEffect int
logEffect := func(x int) int {
sideEffect = x
return x + 100 // This result should be discarded
}
chained := MonadChainFirst(double, logEffect)
result := chained(5)
// Should return double's result (10), not logEffect's result
assert.Equal(t, 10, result, "ChainFirst should return first result")
// But side effect should have been executed with double's result
assert.Equal(t, 10, sideEffect, "ChainFirst should execute second function for effect")
}
@@ -88,11 +88,15 @@ func Identity[A any]() Endomorphism[A] {
// For endomorphisms, this operation is composition (Compose). This means:
// - Concat(f, Concat(g, h)) = Concat(Concat(f, g), h)
//
// IMPORTANT: Concat uses Compose, which executes RIGHT-TO-LEFT:
// - Concat(f, g) applies g first, then f
//   - This is equivalent to MonadCompose(f, g)
//
// The returned semigroup can be used with semigroup operations to combine
// multiple endomorphisms.
//
// Returns:
// - A Semigroup[Endomorphism[A]] where concat is composition
// - A Semigroup[Endomorphism[A]] where concat is composition (right-to-left)
//
// Example:
//
@@ -102,11 +106,11 @@ func Identity[A any]() Endomorphism[A] {
// double := func(x int) int { return x * 2 }
// increment := func(x int) int { return x + 1 }
//
// // Combine using the semigroup
// // Combine using the semigroup (RIGHT-TO-LEFT execution)
// combined := sg.Concat(double, increment)
// result := combined(5) // (5 * 2) + 1 = 11
// result := combined(5) // (5 + 1) * 2 = 12 (increment first, then double)
func Semigroup[A any]() S.Semigroup[Endomorphism[A]] {
return S.MakeSemigroup(Compose[A])
return S.MakeSemigroup(MonadCompose[A])
}
// Monoid returns a Monoid for endomorphisms where concat is composition and empty is identity.
@@ -115,6 +119,10 @@ func Semigroup[A any]() S.Semigroup[Endomorphism[A]] {
// - The binary operation is composition (Compose)
// - The identity element is the identity function (Identity)
//
// IMPORTANT: Concat uses Compose, which executes RIGHT-TO-LEFT:
// - Concat(f, g) applies g first, then f
// - ConcatAll applies functions from right to left
//
// This satisfies the monoid laws:
// - Right identity: Concat(x, Empty) = x
// - Left identity: Concat(Empty, x) = x
@@ -124,7 +132,7 @@ func Semigroup[A any]() S.Semigroup[Endomorphism[A]] {
// combine multiple endomorphisms.
//
// Returns:
// - A Monoid[Endomorphism[A]] with composition and identity
// - A Monoid[Endomorphism[A]] with composition (right-to-left) and identity
//
// Example:
//
@@ -135,9 +143,9 @@ func Semigroup[A any]() S.Semigroup[Endomorphism[A]] {
// increment := func(x int) int { return x + 1 }
// square := func(x int) int { return x * x }
//
// // Combine multiple endomorphisms
// // Combine multiple endomorphisms (RIGHT-TO-LEFT execution)
// combined := M.ConcatAll(monoid)(double, increment, square)
// result := combined(5) // ((5 * 2) + 1) * ((5 * 2) + 1) = 121
//	result := combined(5)  // double(increment(square(5))) = double(increment(25)) = double(26) = 52
func Monoid[A any]() M.Monoid[Endomorphism[A]] {
return M.MakeMonoid(Compose[A], Identity[A]())
return M.MakeMonoid(MonadCompose[A], Identity[A]())
}
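
To make the right-to-left order concrete, here is a minimal, runnable sketch. It assumes the endomorphism API shown in this change set (MonadCompose, Semigroup, Monoid) and the import path github.com/IBM/fp-go/v2/endomorphism.

```go
package main

import (
	"fmt"

	endo "github.com/IBM/fp-go/v2/endomorphism"
)

func main() {
	double := func(x int) int { return x * 2 }
	increment := func(x int) int { return x + 1 }
	square := func(x int) int { return x * x }

	// MonadCompose(f, g) applies g first, then f
	fmt.Println(endo.MonadCompose(double, increment)(5)) // increment then double: 12

	// The semigroup's Concat delegates to MonadCompose, so it is right-to-left as well
	sg := endo.Semigroup[int]()
	fmt.Println(sg.Concat(double, increment)(5)) // 12

	// With three endomorphisms, the rightmost runs first and the leftmost runs last
	m := endo.Monoid[int]()
	combined := m.Concat(double, m.Concat(increment, square))
	fmt.Println(combined(5)) // double(increment(square(5))) = 52
}
```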

View File

@@ -37,7 +37,7 @@ type (
// var g endomorphism.Endomorphism[int] = increment
Endomorphism[A any] = func(A) A
Kleisli[A, B any] = func(A) Endomorphism[B]
Kleisli[A any] = func(A) Endomorphism[A]
// Operator represents a transformation from one endomorphism to another.
//
@@ -54,5 +54,5 @@ type (
// return strconv.Itoa(result)
// }
// }
Operator[A, B any] = Kleisli[Endomorphism[A], B]
Operator[A any] = Endomorphism[Endomorphism[A]]
)
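
A sketch of how the revised single-parameter aliases read in user code (the import path github.com/IBM/fp-go/v2/endomorphism is assumed): a Kleisli arrow derives an endomorphism from a value, and an Operator transforms one endomorphism into another.

```go
package main

import (
	"fmt"

	endo "github.com/IBM/fp-go/v2/endomorphism"
)

func main() {
	// Kleisli[int]: build an endomorphism from a value
	var clampAt endo.Kleisli[int] = func(limit int) endo.Endomorphism[int] {
		return func(x int) int {
			if x > limit {
				return limit
			}
			return x
		}
	}
	fmt.Println(clampAt(10)(42)) // 10

	// Operator[int]: an endomorphism of endomorphisms, here adding tracing
	var traced endo.Operator[int] = func(f endo.Endomorphism[int]) endo.Endomorphism[int] {
		return func(x int) int {
			y := f(x)
			fmt.Printf("%d -> %d\n", x, y)
			return y
		}
	}
	fmt.Println(traced(clampAt(10))(42)) // prints "42 -> 10", then 10
}
```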

View File

@@ -15,7 +15,84 @@
package eq
// Contramap implements an Equals predicate based on a mapping
// Contramap creates an Eq[B] from an Eq[A] by providing a function that maps B to A.
// This is a contravariant functor operation that allows you to transform equality predicates
// by mapping the input type. It's particularly useful for comparing complex types by
// extracting comparable fields.
//
// The name "contramap" comes from category theory, where it represents a contravariant
// functor. Unlike regular map (covariant), which transforms the output, contramap
// transforms the input in the opposite direction.
//
// Type Parameters:
// - A: The type that has an existing Eq instance
// - B: The type for which we want to create an Eq instance
//
// Parameters:
// - f: A function that extracts or converts a value of type B to type A
//
// Returns:
// - A function that takes an Eq[A] and returns an Eq[B]
//
// The resulting Eq[B] compares two B values by:
// 1. Applying f to both values to get A values
// 2. Using the original Eq[A] to compare those A values
//
// Example - Compare structs by a single field:
//
// type Person struct {
// ID int
// Name string
// Age int
// }
//
// // Compare persons by ID only
// personEqByID := eq.Contramap(func(p Person) int {
// return p.ID
// })(eq.FromStrictEquals[int]())
//
// p1 := Person{ID: 1, Name: "Alice", Age: 30}
// p2 := Person{ID: 1, Name: "Bob", Age: 25}
// assert.True(t, personEqByID.Equals(p1, p2)) // Same ID, different names
//
// Example - Case-insensitive string comparison:
//
// type User struct {
// Username string
// Email string
// }
//
// caseInsensitiveEq := eq.FromEquals(func(a, b string) bool {
// return strings.EqualFold(a, b)
// })
//
// userEqByUsername := eq.Contramap(func(u User) string {
// return u.Username
// })(caseInsensitiveEq)
//
// u1 := User{Username: "Alice", Email: "alice@example.com"}
// u2 := User{Username: "ALICE", Email: "different@example.com"}
// assert.True(t, userEqByUsername.Equals(u1, u2)) // Case-insensitive match
//
// Example - Nested field access:
//
// type Address struct {
// City string
// }
//
// type Person struct {
// Name string
// Address Address
// }
//
// // Compare persons by city
// personEqByCity := eq.Contramap(func(p Person) string {
// return p.Address.City
// })(eq.FromStrictEquals[string]())
//
// Contramap Law:
// Contramap must satisfy: Contramap(f)(Contramap(g)(eq)) = Contramap(g ∘ f)(eq)
// This means contramapping twice is the same as contramapping with the composed function.
func Contramap[A, B any](f func(b B) A) func(Eq[A]) Eq[B] {
return func(fa Eq[A]) Eq[B] {
equals := fa.Equals

View File

@@ -19,38 +19,188 @@ import (
F "github.com/IBM/fp-go/v2/function"
)
// Eq represents an equality type class for type T.
// It provides a way to define custom equality semantics for any type,
// not just those that are comparable with Go's == operator.
//
// Type Parameters:
// - T: The type for which equality is defined
//
// Methods:
// - Equals(x, y T) bool: Returns true if x and y are considered equal
//
// Laws:
// An Eq instance must satisfy the equivalence relation laws:
// 1. Reflexivity: Equals(x, x) = true for all x
// 2. Symmetry: Equals(x, y) = Equals(y, x) for all x, y
// 3. Transitivity: If Equals(x, y) and Equals(y, z), then Equals(x, z)
//
// Example:
//
// // Create an equality predicate for integers
// intEq := eq.FromStrictEquals[int]()
// assert.True(t, intEq.Equals(42, 42))
// assert.False(t, intEq.Equals(42, 43))
//
// // Create a custom equality predicate
// caseInsensitiveEq := eq.FromEquals(func(a, b string) bool {
// return strings.EqualFold(a, b)
// })
// assert.True(t, caseInsensitiveEq.Equals("Hello", "HELLO"))
type Eq[T any] interface {
// Equals returns true if x and y are considered equal according to this equality predicate.
//
// Parameters:
// - x: The first value to compare
// - y: The second value to compare
//
// Returns:
// - true if x and y are equal, false otherwise
Equals(x, y T) bool
}
// eq is the internal implementation of the Eq interface.
// It wraps a comparison function to provide the Eq interface.
type eq[T any] struct {
c func(x, y T) bool
}
// Equals implements the Eq interface by delegating to the wrapped comparison function.
func (e eq[T]) Equals(x, y T) bool {
return e.c(x, y)
}
// strictEq is a helper function that uses Go's built-in == operator for comparison.
// It can only be used with comparable types.
func strictEq[A comparable](a, b A) bool {
return a == b
}
// FromStrictEquals constructs an [EQ.Eq] from the canonical comparison function
// FromStrictEquals constructs an Eq instance using Go's built-in == operator.
// This is the most common way to create an Eq for types that support ==.
//
// Type Parameters:
// - T: Must be a comparable type (supports ==)
//
// Returns:
// - An Eq[T] that uses == for equality comparison
//
// Example:
//
// intEq := eq.FromStrictEquals[int]()
// assert.True(t, intEq.Equals(42, 42))
// assert.False(t, intEq.Equals(42, 43))
//
// stringEq := eq.FromStrictEquals[string]()
// assert.True(t, stringEq.Equals("hello", "hello"))
// assert.False(t, stringEq.Equals("hello", "world"))
//
// Note: For types that are not comparable or require custom equality logic,
// use FromEquals instead.
func FromStrictEquals[T comparable]() Eq[T] {
return FromEquals(strictEq[T])
}
// FromEquals constructs an [EQ.Eq] from the comparison function
// FromEquals constructs an Eq instance from a custom comparison function.
// This allows defining equality for any type, including non-comparable types
// or types that need custom equality semantics.
//
// Type Parameters:
// - T: The type for which equality is being defined (can be any type)
//
// Parameters:
// - c: A function that takes two values of type T and returns true if they are equal
//
// Returns:
// - An Eq[T] that uses the provided function for equality comparison
//
// Example:
//
// // Case-insensitive string equality
// caseInsensitiveEq := eq.FromEquals(func(a, b string) bool {
// return strings.EqualFold(a, b)
// })
// assert.True(t, caseInsensitiveEq.Equals("Hello", "HELLO"))
//
// // Approximate float equality
// approxEq := eq.FromEquals(func(a, b float64) bool {
// return math.Abs(a-b) < 0.0001
// })
// assert.True(t, approxEq.Equals(1.0, 1.00009))
//
// // Custom struct equality (compare by specific fields)
// type Person struct { ID int; Name string }
// personEq := eq.FromEquals(func(a, b Person) bool {
// return a.ID == b.ID // Compare only by ID
// })
//
// Note: The provided function should satisfy the equivalence relation laws
// (reflexivity, symmetry, transitivity) for correct behavior.
func FromEquals[T any](c func(x, y T) bool) Eq[T] {
return eq[T]{c: c}
}
// Empty returns the equals predicate that is always true
// Empty returns an Eq instance that always returns true for any comparison.
// This is the identity element for the Eq Monoid and is useful when you need
// an equality predicate that accepts everything.
//
// Type Parameters:
// - T: The type for which the always-true equality is defined
//
// Returns:
// - An Eq[T] where Equals(x, y) always returns true
//
// Example:
//
// alwaysTrue := eq.Empty[int]()
// assert.True(t, alwaysTrue.Equals(1, 2))
// assert.True(t, alwaysTrue.Equals(42, 100))
//
// // Useful as identity in monoid operations
// monoid := eq.Monoid[string]()
// combined := monoid.Concat(eq.FromStrictEquals[string](), monoid.Empty())
// // combined behaves the same as FromStrictEquals
//
// Use cases:
// - As the identity element in Monoid operations
// - When you need a placeholder equality that accepts everything
// - In generic code that requires an Eq but doesn't need actual comparison
func Empty[T any]() Eq[T] {
return FromEquals(F.Constant2[T, T](true))
}
// Equals returns a predicate to test if one value equals the other under an equals predicate
// Equals returns a curried equality checking function.
// This is useful for partial application and functional composition.
//
// Type Parameters:
// - T: The type being compared
//
// Parameters:
// - eq: The Eq instance to use for comparison
//
// Returns:
// - A function that takes a value and returns another function that checks equality with that value
//
// Example:
//
// intEq := eq.FromStrictEquals[int]()
// equals42 := eq.Equals(intEq)(42)
//
// assert.True(t, equals42(42))
// assert.False(t, equals42(43))
//
// // Use in higher-order functions
// numbers := []int{40, 41, 42, 43, 44}
// filtered := array.Filter(equals42)(numbers)
// // filtered = [42]
//
// // Partial application
// equalsFunc := eq.Equals(intEq)
// equals10 := equalsFunc(10)
// equals20 := equalsFunc(20)
//
// This is particularly useful when working with functional programming patterns
// like map, filter, and other higher-order functions.
func Equals[T any](eq Eq[T]) func(T) func(T) bool {
return func(other T) func(T) bool {
return F.Bind2nd(eq.Equals, other)

View File

@@ -20,6 +20,65 @@ import (
S "github.com/IBM/fp-go/v2/semigroup"
)
// Semigroup returns a Semigroup instance for Eq[A].
// A Semigroup provides a way to combine two values of the same type.
// For Eq, the combination uses logical AND - two values are equal only if
// they are equal according to BOTH equality predicates.
//
// Type Parameters:
// - A: The type for which equality predicates are being combined
//
// Returns:
// - A Semigroup[Eq[A]] that combines equality predicates with logical AND
//
// The Concat operation satisfies:
// - Associativity: Concat(Concat(x, y), z) = Concat(x, Concat(y, z))
//
// Example - Combine multiple equality checks:
//
// type User struct {
// Username string
// Email string
// }
//
// usernameEq := eq.Contramap(func(u User) string {
// return u.Username
// })(eq.FromStrictEquals[string]())
//
// emailEq := eq.Contramap(func(u User) string {
// return u.Email
// })(eq.FromStrictEquals[string]())
//
// // Users are equal only if BOTH username AND email match
// userEq := eq.Semigroup[User]().Concat(usernameEq, emailEq)
//
// u1 := User{Username: "alice", Email: "alice@example.com"}
// u2 := User{Username: "alice", Email: "alice@example.com"}
// u3 := User{Username: "alice", Email: "different@example.com"}
//
// assert.True(t, userEq.Equals(u1, u2)) // Both match
// assert.False(t, userEq.Equals(u1, u3)) // Email differs
//
// Example - Combine multiple field checks:
//
// type Product struct {
// ID int
// Name string
// Price float64
// }
//
// idEq := eq.Contramap(func(p Product) int { return p.ID })(eq.FromStrictEquals[int]())
// nameEq := eq.Contramap(func(p Product) string { return p.Name })(eq.FromStrictEquals[string]())
// priceEq := eq.Contramap(func(p Product) float64 { return p.Price })(eq.FromStrictEquals[float64]())
//
// sg := eq.Semigroup[Product]()
// // All three fields must match
// productEq := sg.Concat(sg.Concat(idEq, nameEq), priceEq)
//
// Use cases:
// - Combining multiple field comparisons for struct equality
// - Building complex equality predicates from simpler ones
// - Ensuring all conditions are met (logical AND of predicates)
func Semigroup[A any]() S.Semigroup[Eq[A]] {
return S.MakeSemigroup(func(x, y Eq[A]) Eq[A] {
return FromEquals(func(a, b A) bool {
@@ -28,6 +87,67 @@ func Semigroup[A any]() S.Semigroup[Eq[A]] {
})
}
// Monoid returns a Monoid instance for Eq[A].
// A Monoid extends Semigroup with an identity element (Empty).
// For Eq, the identity is an equality predicate that always returns true.
//
// Type Parameters:
// - A: The type for which the equality monoid is defined
//
// Returns:
// - A Monoid[Eq[A]] with:
// - Concat: Combines equality predicates with logical AND (from Semigroup)
// - Empty: An equality predicate that always returns true (identity element)
//
// Monoid Laws:
// 1. Left Identity: Concat(Empty(), x) = x
// 2. Right Identity: Concat(x, Empty()) = x
// 3. Associativity: Concat(Concat(x, y), z) = Concat(x, Concat(y, z))
//
// Example - Using the identity element:
//
// monoid := eq.Monoid[int]()
// intEq := eq.FromStrictEquals[int]()
//
// // Empty is the identity - combining with it doesn't change behavior
// leftIdentity := monoid.Concat(monoid.Empty(), intEq)
// rightIdentity := monoid.Concat(intEq, monoid.Empty())
//
// assert.True(t, leftIdentity.Equals(42, 42))
// assert.False(t, leftIdentity.Equals(42, 43))
// assert.True(t, rightIdentity.Equals(42, 42))
// assert.False(t, rightIdentity.Equals(42, 43))
//
// Example - Empty always returns true:
//
// monoid := eq.Monoid[string]()
// alwaysTrue := monoid.Empty()
//
// assert.True(t, alwaysTrue.Equals("hello", "world"))
// assert.True(t, alwaysTrue.Equals("same", "same"))
// assert.True(t, alwaysTrue.Equals("", "anything"))
//
// Example - Building complex equality with fold:
//
// type Person struct {
// FirstName string
// LastName string
// Age int
// }
//
// firstNameEq := eq.Contramap(func(p Person) string { return p.FirstName })(eq.FromStrictEquals[string]())
// lastNameEq := eq.Contramap(func(p Person) string { return p.LastName })(eq.FromStrictEquals[string]())
// ageEq := eq.Contramap(func(p Person) int { return p.Age })(eq.FromStrictEquals[int]())
//
// monoid := eq.Monoid[Person]()
// // Combine all predicates - all fields must match
// personEq := monoid.Concat(monoid.Concat(firstNameEq, lastNameEq), ageEq)
//
// Use cases:
// - Providing a neutral element for equality combinations
// - Generic algorithms that require a Monoid instance
// - Folding multiple equality predicates into one
// - Default "accept everything" equality predicate
func Monoid[A any]() M.Monoid[Eq[A]] {
return M.MakeMonoid(Semigroup[A]().Concat, Empty[A]())
}
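
A compact, runnable sketch tying Contramap and the Monoid together (the import path github.com/IBM/fp-go/v2/eq is assumed here):

```go
package main

import (
	"fmt"

	"github.com/IBM/fp-go/v2/eq"
)

type Person struct {
	FirstName string
	LastName  string
	Age       int
}

func main() {
	byFirst := eq.Contramap(func(p Person) string { return p.FirstName })(eq.FromStrictEquals[string]())
	byLast := eq.Contramap(func(p Person) string { return p.LastName })(eq.FromStrictEquals[string]())
	byAge := eq.Contramap(func(p Person) int { return p.Age })(eq.FromStrictEquals[int]())

	// All three fields must match (logical AND)
	m := eq.Monoid[Person]()
	personEq := m.Concat(m.Concat(byFirst, byLast), byAge)

	a := Person{"Ada", "Lovelace", 36}
	b := Person{"Ada", "Lovelace", 36}
	c := Person{"Ada", "Byron", 36}

	fmt.Println(personEq.Equals(a, b)) // true
	fmt.Println(personEq.Equals(a, c)) // false
}
```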

File diff suppressed because it is too large

View File

@@ -15,7 +15,105 @@
package function
// Flip reverses the order of parameters of a curried function
// Flip reverses the order of parameters of a curried function.
//
// Given a curried function f that takes T1 then T2 and returns R,
// Flip returns a new curried function that takes T2 then T1 and returns R.
// This is useful when you have a curried function but need to apply its
// arguments in a different order.
//
// Mathematical notation:
// - Given: f: T1 → T2 → R
// - Returns: g: T2 → T1 → R where g(t2)(t1) = f(t1)(t2)
//
// Type Parameters:
// - T1: The type of the first parameter (becomes second after flip)
// - T2: The type of the second parameter (becomes first after flip)
// - R: The return type
//
// Parameters:
// - f: A curried function taking T1 then T2 and returning R
//
// Returns:
// - A new curried function taking T2 then T1 and returning R
//
// Relationship to Swap:
//
// Flip is the curried version of Swap. While Swap works with binary functions,
// Flip works with curried functions:
// - Swap: func(T1, T2) R → func(T2, T1) R
// - Flip: func(T1) func(T2) R → func(T2) func(T1) R
//
// Example - Basic usage:
//
// // Create a curried division function
// divide := Curry2(func(a, b float64) float64 { return a / b })
// // divide(10)(2) = 5.0 (10 / 2)
//
// // Flip the parameter order
// divideFlipped := Flip(divide)
// // divideFlipped(10)(2) = 0.2 (2 / 10)
//
// Example - String formatting:
//
// // Curried string formatter: format(template)(value)
// format := Curry2(func(template, value string) string {
// return fmt.Sprintf(template, value)
// })
//
// // Normal order: template first, then value
// result1 := format("Hello, %s!")("World") // "Hello, World!"
//
// // Flipped order: value first, then template
// formatFlipped := Flip(format)
//	result2 := formatFlipped("World")("Hello, %s!") // "Hello, World!"
//
// // Useful for partial application in different order
// greetWorld := format("Hello, %s!")
// greetWorld("Alice") // "Hello, Alice!"
//
// formatAlice := formatFlipped("Alice")
// formatAlice("Hello, %s!") // "Hello, Alice!"
//
// Example - Practical use case with map operations:
//
// // Curried map lookup: getFrom(map)(key)
// getFrom := Curry2(func(m map[string]int, key string) int {
// return m[key]
// })
//
// data := map[string]int{"a": 1, "b": 2, "c": 3}
//
// // Create a getter for this specific map
// getValue := getFrom(data)
// getValue("a") // 1
//
// // Flip to create key-first version: get(key)(map)
// get := Flip(getFrom)
// getA := get("a")
// getA(data) // 1
//
// Example - Combining with other functional patterns:
//
// // Curried append: append(slice)(element)
// appendTo := Curry2(func(slice []int, elem int) []int {
// return append(slice, elem)
// })
//
//	// Flip to get element-first application: appendElem(element)(slice)
//	appendElem := Flip(appendTo)
//
// nums := []int{1, 2, 3}
// add4 := appendTo(nums)
// result1 := add4(4) // [1, 2, 3, 4]
//
//	appendZero := appendElem(0)
//	result2 := appendZero(nums) // [1, 2, 3, 0]
//
// See also:
// - Swap: For flipping parameters of non-curried binary functions
// - Curry2: For converting binary functions to curried form
// - Uncurry2: For converting curried functions back to binary form
func Flip[T1, T2, R any](f func(T1) func(T2) R) func(T2) func(T1) R {
return func(t2 T2) func(T1) R {
return func(t1 T1) R {

View File

@@ -22,15 +22,265 @@ import (
"github.com/stretchr/testify/assert"
)
// TestFlip tests the Flip function with various scenarios
func TestFlip(t *testing.T) {
t.Run("flips string concatenation", func(t *testing.T) {
// Create a curried function that formats strings
format := Curry2(func(a, b string) string {
return fmt.Sprintf("%s:%s", a, b)
})
x := Curry2(func(a, b string) string {
return fmt.Sprintf("%s:%s", a, b)
// Original order: a then b
assert.Equal(t, "a:b", format("a")("b"))
assert.Equal(t, "hello:world", format("hello")("world"))
// Flipped order: b then a
flipped := Flip(format)
assert.Equal(t, "b:a", flipped("a")("b"))
assert.Equal(t, "world:hello", flipped("hello")("world"))
})
assert.Equal(t, "a:b", x("a")("b"))
t.Run("flips numeric operations", func(t *testing.T) {
// Curried subtraction: subtract(a)(b) = a - b
subtract := Curry2(func(a, b int) int {
return a - b
})
y := Flip(x)
// Original: 10 - 3 = 7
assert.Equal(t, 7, subtract(10)(3))
assert.Equal(t, "b:a", y("a")("b"))
// Flipped: 3 - 10 = -7
flipped := Flip(subtract)
assert.Equal(t, -7, flipped(10)(3))
})
t.Run("flips division", func(t *testing.T) {
// Curried division: divide(a)(b) = a / b
divide := Curry2(func(a, b float64) float64 {
return a / b
})
// Original: 10 / 2 = 5.0
assert.Equal(t, 5.0, divide(10)(2))
// Flipped: 2 / 10 = 0.2
flipped := Flip(divide)
assert.Equal(t, 0.2, flipped(10)(2))
})
t.Run("flips with partial application", func(t *testing.T) {
// Curried append-like operation
prepend := Curry2(func(prefix, text string) string {
return prefix + text
})
// Create specialized functions with original order
addHello := prepend("Hello, ")
assert.Equal(t, "Hello, World", addHello("World"))
assert.Equal(t, "Hello, Go", addHello("Go"))
// Flip and create specialized functions with reversed order
flipped := Flip(prepend)
addToWorld := flipped("World")
assert.Equal(t, "Hello, World", addToWorld("Hello, "))
assert.Equal(t, "Goodbye, World", addToWorld("Goodbye, "))
})
t.Run("flips with different types", func(t *testing.T) {
// Curried function with different input types
repeat := Curry2(func(s string, n int) string {
result := ""
for i := 0; i < n; i++ {
result += s
}
return result
})
// Original: repeat("x")(3) = "xxx"
assert.Equal(t, "xxx", repeat("x")(3))
assert.Equal(t, "abab", repeat("ab")(2))
// Flipped: repeat(3)("x") = "xxx"
flipped := Flip(repeat)
assert.Equal(t, "xxx", flipped(3)("x"))
assert.Equal(t, "abab", flipped(2)("ab"))
})
t.Run("double flip returns to original", func(t *testing.T) {
// Flipping twice should return to original behavior
original := Curry2(func(a, b string) string {
return a + "-" + b
})
flipped := Flip(original)
doubleFlipped := Flip(flipped)
// Original and double-flipped should behave the same
assert.Equal(t, original("a")("b"), doubleFlipped("a")("b"))
assert.Equal(t, "a-b", doubleFlipped("a")("b"))
})
t.Run("flips with complex types", func(t *testing.T) {
type Person struct {
Name string
Age int
}
// Curried function creating a person
makePerson := Curry2(func(name string, age int) Person {
return Person{Name: name, Age: age}
})
// Original order: name then age
alice := makePerson("Alice")(30)
assert.Equal(t, "Alice", alice.Name)
assert.Equal(t, 30, alice.Age)
// Flipped order: age then name
flipped := Flip(makePerson)
bob := flipped(25)("Bob")
assert.Equal(t, "Bob", bob.Name)
assert.Equal(t, 25, bob.Age)
})
t.Run("flips map operations", func(t *testing.T) {
// Curried map getter: get(map)(key)
get := Curry2(func(m map[string]int, key string) int {
return m[key]
})
data := map[string]int{"a": 1, "b": 2, "c": 3}
// Original: provide map first, then key
getValue := get(data)
assert.Equal(t, 1, getValue("a"))
assert.Equal(t, 2, getValue("b"))
// Flipped: provide key first, then map
flipped := Flip(get)
getA := flipped("a")
assert.Equal(t, 1, getA(data))
data2 := map[string]int{"a": 10, "b": 20}
assert.Equal(t, 10, getA(data2))
})
t.Run("flips boolean operations", func(t *testing.T) {
// Curried logical operation
implies := Curry2(func(a, b bool) bool {
return !a || b
})
// Test truth table for implication
assert.True(t, implies(true)(true)) // T → T = T
assert.False(t, implies(true)(false)) // T → F = F
assert.True(t, implies(false)(true)) // F → T = T
assert.True(t, implies(false)(false)) // F → F = T
// Flipped version (reverse implication)
flipped := Flip(implies)
assert.True(t, flipped(true)(true)) // T ← T = T
assert.True(t, flipped(true)(false)) // T ← F = T
assert.False(t, flipped(false)(true)) // F ← T = F
assert.True(t, flipped(false)(false)) // F ← F = T
})
t.Run("flips with slice operations", func(t *testing.T) {
// Curried slice append
appendTo := Curry2(func(slice []int, elem int) []int {
return append(slice, elem)
})
nums := []int{1, 2, 3}
// Original: provide slice first, then element
add4 := appendTo(nums)
result1 := add4(4)
assert.Equal(t, []int{1, 2, 3, 4}, result1)
// Flipped: provide element first, then slice
flipped := Flip(appendTo)
appendFive := flipped(5)
result2 := appendFive(nums)
assert.Equal(t, []int{1, 2, 3, 5}, result2)
})
}
// TestFlipProperties tests mathematical properties of Flip
func TestFlipProperties(t *testing.T) {
t.Run("flip is involutive (flip . flip = id)", func(t *testing.T) {
// Flipping twice should give back the original function behavior
original := Curry2(func(a, b int) int {
return a*10 + b
})
flipped := Flip(original)
doubleFlipped := Flip(flipped)
// Test with multiple inputs
testCases := []struct{ a, b int }{
{1, 2},
{5, 7},
{0, 0},
{-1, 3},
}
for _, tc := range testCases {
assert.Equal(t,
original(tc.a)(tc.b),
doubleFlipped(tc.a)(tc.b),
"flip(flip(f)) should equal f for inputs (%d, %d)", tc.a, tc.b)
}
})
t.Run("flip preserves function composition", func(t *testing.T) {
// If we have f: A → B → C and g: C → D
// then g ∘ f(a)(b) = g(f(a)(b))
// and g ∘ flip(f)(b)(a) = g(flip(f)(b)(a))
f := Curry2(func(a, b int) int {
return a + b
})
g := func(n int) int {
return n * 2
}
flippedF := Flip(f)
// Compose g with f
composed1 := func(a, b int) int {
return g(f(a)(b))
}
// Compose g with flipped f
composed2 := func(a, b int) int {
return g(flippedF(b)(a))
}
// Both should give the same result
assert.Equal(t, composed1(3, 5), composed2(3, 5))
assert.Equal(t, 16, composed1(3, 5)) // (3 + 5) * 2 = 16
})
}
// BenchmarkFlip benchmarks the Flip function
func BenchmarkFlip(b *testing.B) {
add := Curry2(func(a, b int) int {
return a + b
})
flipped := Flip(add)
b.Run("original", func(b *testing.B) {
for i := 0; i < b.N; i++ {
_ = add(i)(i + 1)
}
})
b.Run("flipped", func(b *testing.B) {
for i := 0; i < b.N; i++ {
_ = flipped(i)(i + 1)
}
})
}

View File

@@ -80,7 +80,6 @@ import (
A "github.com/IBM/fp-go/v2/array"
B "github.com/IBM/fp-go/v2/bytes"
E "github.com/IBM/fp-go/v2/either"
ENDO "github.com/IBM/fp-go/v2/endomorphism"
F "github.com/IBM/fp-go/v2/function"
C "github.com/IBM/fp-go/v2/http/content"
@@ -91,16 +90,17 @@ import (
L "github.com/IBM/fp-go/v2/optics/lens"
O "github.com/IBM/fp-go/v2/option"
R "github.com/IBM/fp-go/v2/record"
"github.com/IBM/fp-go/v2/result"
S "github.com/IBM/fp-go/v2/string"
T "github.com/IBM/fp-go/v2/tuple"
)
type (
Builder struct {
method O.Option[string]
method Option[string]
url string
headers http.Header
body O.Option[E.Either[error, []byte]]
body Option[Result[[]byte]]
query url.Values
}
@@ -117,19 +117,19 @@ var (
// Monoid is the [M.Monoid] for the [Endomorphism]
Monoid = ENDO.Monoid[*Builder]()
// Url is a [L.Lens] for the URL
// Url is a [Lens] for the URL
//
// Deprecated: use [URL] instead
Url = L.MakeLensRef((*Builder).GetURL, (*Builder).SetURL)
// URL is a [L.Lens] for the URL
// URL is a [Lens] for the URL
URL = L.MakeLensRef((*Builder).GetURL, (*Builder).SetURL)
// Method is a [L.Lens] for the HTTP method
// Method is a [Lens] for the HTTP method
Method = L.MakeLensRef((*Builder).GetMethod, (*Builder).SetMethod)
// Body is a [L.Lens] for the request body
// Body is a [Lens] for the request body
Body = L.MakeLensRef((*Builder).GetBody, (*Builder).SetBody)
// Headers is a [L.Lens] for the complete set of request headers
// Headers is a [Lens] for the complete set of request headers
Headers = L.MakeLensRef((*Builder).GetHeaders, (*Builder).SetHeaders)
// Query is a [L.Lens] for the set of query parameters
// Query is a [Lens] for the set of query parameters
Query = L.MakeLensRef((*Builder).GetQuery, (*Builder).SetQuery)
rawQuery = L.MakeLensRef(getRawQuery, setRawQuery)
@@ -139,11 +139,11 @@ var (
setHeader = F.Bind2of3((*Builder).SetHeader)
noHeader = O.None[string]()
noBody = O.None[E.Either[error, []byte]]()
noBody = O.None[Result[[]byte]]()
noQueryArg = O.None[string]()
parseURL = E.Eitherize1(url.Parse)
parseQuery = E.Eitherize1(url.ParseQuery)
parseURL = result.Eitherize1(url.Parse)
parseQuery = result.Eitherize1(url.ParseQuery)
// WithQuery creates a [Endomorphism] for a complete set of query parameters
WithQuery = Query.Set
@@ -159,12 +159,12 @@ var (
WithHeaders = Headers.Set
// WithBody creates a [Endomorphism] for a request body
WithBody = F.Flow2(
O.Of[E.Either[error, []byte]],
O.Of[Result[[]byte]],
Body.Set,
)
// WithBytes creates a [Endomorphism] for a request body using bytes
WithBytes = F.Flow2(
E.Of[error, []byte],
result.Of[[]byte],
WithBody,
)
// WithContentType adds the [H.ContentType] header
@@ -202,7 +202,7 @@ var (
)
// bodyAsBytes returns a []byte with a fallback to the empty array
bodyAsBytes = O.Fold(B.Empty, E.Fold(F.Ignore1of1[error](B.Empty), F.Identity[[]byte]))
bodyAsBytes = O.Fold(B.Empty, result.Fold(F.Ignore1of1[error](B.Empty), F.Identity[[]byte]))
)
func setRawQuery(u *url.URL, raw string) *url.URL {
@@ -223,35 +223,35 @@ func (builder *Builder) clone() *Builder {
// GetTargetUrl constructs a full URL with query parameters on top of the provided URL string
//
// Deprecated: use [GetTargetURL] instead
func (builder *Builder) GetTargetUrl() E.Either[error, string] {
func (builder *Builder) GetTargetUrl() Result[string] {
return builder.GetTargetURL()
}
// GetTargetURL constructs a full URL with query parameters on top of the provided URL string
func (builder *Builder) GetTargetURL() E.Either[error, string] {
func (builder *Builder) GetTargetURL() Result[string] {
// construct the final URL
return F.Pipe3(
builder,
Url.Get,
parseURL,
E.Chain(F.Flow4(
result.Chain(F.Flow4(
T.Replicate2[*url.URL],
T.Map2(
F.Flow2(
F.Curry2(setRawQuery),
E.Of[error, func(string) *url.URL],
result.Of[func(string) *url.URL],
),
F.Flow3(
rawQuery.Get,
parseQuery,
E.Map[error](F.Flow2(
result.Map(F.Flow2(
F.Curry2(FM.ValuesMonoid.Concat)(builder.GetQuery()),
(url.Values).Encode,
)),
),
),
T.Tupled2(E.MonadAp[*url.URL, error, string]),
E.Map[error]((*url.URL).String),
T.Tupled2(result.MonadAp[*url.URL, string]),
result.Map((*url.URL).String),
)),
)
}
@@ -285,7 +285,7 @@ func (builder *Builder) SetQuery(query url.Values) *Builder {
return builder
}
func (builder *Builder) GetBody() O.Option[E.Either[error, []byte]] {
func (builder *Builder) GetBody() Option[Result[[]byte]] {
return builder.body
}
@@ -310,7 +310,7 @@ func (builder *Builder) SetHeaders(headers http.Header) *Builder {
return builder
}
func (builder *Builder) SetBody(body O.Option[E.Either[error, []byte]]) *Builder {
func (builder *Builder) SetBody(body Option[Result[[]byte]]) *Builder {
builder.body = body
return builder
}
@@ -325,7 +325,7 @@ func (builder *Builder) DelHeader(name string) *Builder {
return builder
}
func (builder *Builder) GetHeader(name string) O.Option[string] {
func (builder *Builder) GetHeader(name string) Option[string] {
return F.Pipe2(
name,
builder.headers.Get,
@@ -342,8 +342,8 @@ func (builder *Builder) GetHash() string {
return MakeHash(builder)
}
// Header returns a [L.Lens] for a single header
func Header(name string) L.Lens[*Builder, O.Option[string]] {
// Header returns a [Lens] for a single header
func Header(name string) Lens[*Builder, Option[string]] {
get := getHeader(name)
set := F.Bind1of2(setHeader(name))
del := F.Flow2(
@@ -351,7 +351,7 @@ func Header(name string) L.Lens[*Builder, O.Option[string]] {
LZ.Map(delHeader(name)),
)
return L.MakeLens(get, func(b *Builder, value O.Option[string]) *Builder {
return L.MakeLens(get, func(b *Builder, value Option[string]) *Builder {
cpy := b.clone()
return F.Pipe1(
value,
@@ -392,8 +392,8 @@ func WithJSON[T any](data T) Endomorphism {
)
}
// QueryArg is a [L.Lens] for the first value of a query argument
func QueryArg(name string) L.Lens[*Builder, O.Option[string]] {
// QueryArg is a [Lens] for the first value of a query argument
func QueryArg(name string) Lens[*Builder, Option[string]] {
return F.Pipe1(
Query,
L.Compose[*Builder](FM.AtValue(name)),

v2/http/builder/type.go (new file, 13 lines)
View File

@@ -0,0 +1,13 @@
package builder
import (
"github.com/IBM/fp-go/v2/optics/lens"
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/result"
)
type (
Option[T any] = option.Option[T]
Result[T any] = result.Result[T]
Lens[S, T any] = lens.Lens[S, T]
)

View File

@@ -78,12 +78,26 @@ func Ap[A, B, HKTFAB, HKTFGAB, HKTFA, HKTFB any](
return apply.Ap(fap, fmap, O.Ap[B, A], fa)
}
func MatchE[A, HKTEA, HKTB any](mchain func(HKTEA, func(O.Option[A]) HKTB) HKTB, onNone func() HKTB, onSome func(A) HKTB) func(HKTEA) HKTB {
return F.Bind2nd(mchain, O.Fold(onNone, onSome))
func MonadMatchE[A, HKTEA, HKTB any](
fa HKTEA,
mchain func(HKTEA, func(O.Option[A]) HKTB) HKTB,
onNone func() HKTB,
onSome func(A) HKTB) HKTB {
return mchain(fa, O.Fold(onNone, onSome))
}
func MatchE[A, HKTEA, HKTB any](
mchain func(func(O.Option[A]) HKTB) func(HKTEA) HKTB,
onNone func() HKTB,
onSome func(A) HKTB) func(HKTEA) HKTB {
return mchain(O.Fold(onNone, onSome))
}
//go:inline
func GetOrElse[A, HKTEA, HKTB any](mchain func(HKTEA, func(O.Option[A]) HKTB) HKTB, onNone func() HKTB, onSome func(A) HKTB) func(HKTEA) HKTB {
func GetOrElse[A, HKTEA, HKTB any](
mchain func(func(O.Option[A]) HKTB) func(HKTEA) HKTB,
onNone func() HKTB,
onSome func(A) HKTB) func(HKTEA) HKTB {
return MatchE(mchain, onNone, onSome)
}

View File

@@ -233,7 +233,7 @@ func After[GA ~func() O.Option[A], A any](timestamp time.Time) func(GA) GA {
// Fold converts an IOOption into an IO
func Fold[GA ~func() O.Option[A], GB ~func() B, A, B any](onNone func() GB, onSome func(A) GB) func(GA) GB {
return optiont.MatchE(IO.MonadChain[GA, GB, O.Option[A], B], onNone, onSome)
return optiont.MatchE(IO.Chain[GA, GB, O.Option[A], B], onNone, onSome)
}
// Defer creates an IO by creating a brand new IO via a generator function, each time

View File

@@ -24,6 +24,7 @@ import (
"github.com/IBM/fp-go/v2/internal/fromio"
"github.com/IBM/fp-go/v2/internal/optiont"
"github.com/IBM/fp-go/v2/io"
"github.com/IBM/fp-go/v2/lazy"
"github.com/IBM/fp-go/v2/option"
)
@@ -47,7 +48,7 @@ func FromOption[A any](o Option[A]) IOOption[A] {
return io.Of(o)
}
func ChainOptionK[A, B any](f func(A) Option[B]) func(IOOption[A]) IOOption[B] {
func ChainOptionK[A, B any](f func(A) Option[B]) Operator[A, B] {
return optiont.ChainOptionK(
io.Chain[Option[A], Option[B]],
FromOption[B],
@@ -55,7 +56,7 @@ func ChainOptionK[A, B any](f func(A) Option[B]) func(IOOption[A]) IOOption[B] {
)
}
func MonadChainIOK[A, B any](ma IOOption[A], f func(A) IO[B]) IOOption[B] {
func MonadChainIOK[A, B any](ma IOOption[A], f io.Kleisli[A, B]) IOOption[B] {
return fromio.MonadChainIOK(
MonadChain[A, B],
FromIO[B],
@@ -64,7 +65,7 @@ func MonadChainIOK[A, B any](ma IOOption[A], f func(A) IO[B]) IOOption[B] {
)
}
func ChainIOK[A, B any](f func(A) IO[B]) func(IOOption[A]) IOOption[B] {
func ChainIOK[A, B any](f io.Kleisli[A, B]) Operator[A, B] {
return fromio.ChainIOK(
Chain[A, B],
FromIO[B],
@@ -80,15 +81,15 @@ func MonadMap[A, B any](fa IOOption[A], f func(A) B) IOOption[B] {
return optiont.MonadMap(io.MonadMap[Option[A], Option[B]], fa, f)
}
func Map[A, B any](f func(A) B) func(IOOption[A]) IOOption[B] {
func Map[A, B any](f func(A) B) Operator[A, B] {
return optiont.Map(io.Map[Option[A], Option[B]], f)
}
func MonadChain[A, B any](fa IOOption[A], f func(A) IOOption[B]) IOOption[B] {
func MonadChain[A, B any](fa IOOption[A], f Kleisli[A, B]) IOOption[B] {
return optiont.MonadChain(io.MonadChain[Option[A], Option[B]], io.MonadOf[Option[B]], fa, f)
}
func Chain[A, B any](f func(A) IOOption[B]) func(IOOption[A]) IOOption[B] {
func Chain[A, B any](f Kleisli[A, B]) Operator[A, B] {
return optiont.Chain(io.Chain[Option[A], Option[B]], io.Of[Option[B]], f)
}
@@ -99,21 +100,21 @@ func MonadAp[B, A any](mab IOOption[func(A) B], ma IOOption[A]) IOOption[B] {
mab, ma)
}
func Ap[B, A any](ma IOOption[A]) func(IOOption[func(A) B]) IOOption[B] {
func Ap[B, A any](ma IOOption[A]) Operator[func(A) B, B] {
return optiont.Ap(
io.Ap[Option[B], Option[A]],
io.Map[Option[func(A) B], func(Option[A]) Option[B]],
ma)
}
func ApSeq[B, A any](ma IOOption[A]) func(IOOption[func(A) B]) IOOption[B] {
func ApSeq[B, A any](ma IOOption[A]) Operator[func(A) B, B] {
return optiont.Ap(
io.ApSeq[Option[B], Option[A]],
io.Map[Option[func(A) B], func(Option[A]) Option[B]],
ma)
}
func ApPar[B, A any](ma IOOption[A]) func(IOOption[func(A) B]) IOOption[B] {
func ApPar[B, A any](ma IOOption[A]) Operator[func(A) B, B] {
return optiont.Ap(
io.ApPar[Option[B], Option[A]],
io.Map[Option[func(A) B], func(Option[A]) Option[B]],
@@ -124,14 +125,14 @@ func Flatten[A any](mma IOOption[IOOption[A]]) IOOption[A] {
return MonadChain(mma, function.Identity[IOOption[A]])
}
func Optionize0[A any](f func() (A, bool)) func() IOOption[A] {
func Optionize0[A any](f func() (A, bool)) Lazy[IOOption[A]] {
ef := option.Optionize0(f)
return func() IOOption[A] {
return ef
}
}
func Optionize1[T1, A any](f func(t1 T1) (A, bool)) func(T1) IOOption[A] {
func Optionize1[T1, A any](f func(t1 T1) (A, bool)) Kleisli[T1, A] {
ef := option.Optionize1(f)
return func(t1 T1) IOOption[A] {
return func() Option[A] {
@@ -172,8 +173,8 @@ func Memoize[A any](ma IOOption[A]) IOOption[A] {
}
// Fold converts an [IOOption] into an [IO]
func Fold[A, B any](onNone func() IO[B], onSome func(A) IO[B]) func(IOOption[A]) IO[B] {
return optiont.MatchE(io.MonadChain[Option[A], B], onNone, onSome)
func Fold[A, B any](onNone IO[B], onSome io.Kleisli[A, B]) func(IOOption[A]) IO[B] {
return optiont.MatchE(io.Chain[Option[A], B], function.Constant(onNone), onSome)
}
// Defer creates an IO by creating a brand new IO via a generator function, each time
@@ -191,28 +192,28 @@ func FromEither[E, A any](e Either[E, A]) IOOption[A] {
}
// MonadAlt identifies an associative operation on a type constructor
func MonadAlt[A any](first IOOption[A], second Lazy[IOOption[A]]) IOOption[A] {
func MonadAlt[A any](first IOOption[A], second IOOption[A]) IOOption[A] {
return optiont.MonadAlt(
io.MonadOf[Option[A]],
io.MonadChain[Option[A], Option[A]],
first,
second,
lazy.Of(second),
)
}
// Alt identifies an associative operation on a type constructor
func Alt[A any](second Lazy[IOOption[A]]) func(IOOption[A]) IOOption[A] {
func Alt[A any](second IOOption[A]) Operator[A, A] {
return optiont.Alt(
io.Of[Option[A]],
io.Chain[Option[A], Option[A]],
second,
lazy.Of(second),
)
}
// MonadChainFirst runs the monad returned by the function but returns the result of the original monad
func MonadChainFirst[A, B any](ma IOOption[A], f func(A) IOOption[B]) IOOption[A] {
func MonadChainFirst[A, B any](ma IOOption[A], f Kleisli[A, B]) IOOption[A] {
return chain.MonadChainFirst(
MonadChain[A, A],
MonadMap[B, A],
@@ -222,7 +223,7 @@ func MonadChainFirst[A, B any](ma IOOption[A], f func(A) IOOption[B]) IOOption[A
}
// ChainFirst runs the monad returned by the function but returns the result of the original monad
func ChainFirst[A, B any](f func(A) IOOption[B]) func(IOOption[A]) IOOption[A] {
func ChainFirst[A, B any](f Kleisli[A, B]) Operator[A, A] {
return chain.ChainFirst(
Chain[A, A],
Map[B, A],
@@ -231,7 +232,7 @@ func ChainFirst[A, B any](f func(A) IOOption[B]) func(IOOption[A]) IOOption[A] {
}
// MonadChainFirstIOK runs the monad returned by the function but returns the result of the original monad
func MonadChainFirstIOK[A, B any](first IOOption[A], f func(A) IO[B]) IOOption[A] {
func MonadChainFirstIOK[A, B any](first IOOption[A], f io.Kleisli[A, B]) IOOption[A] {
return fromio.MonadChainFirstIOK(
MonadChain[A, A],
MonadMap[B, A],
@@ -242,7 +243,7 @@ func MonadChainFirstIOK[A, B any](first IOOption[A], f func(A) IO[B]) IOOption[A
}
// ChainFirstIOK runs the monad returned by the function but returns the result of the original monad
func ChainFirstIOK[A, B any](f func(A) IO[B]) func(IOOption[A]) IOOption[A] {
func ChainFirstIOK[A, B any](f io.Kleisli[A, B]) Operator[A, A] {
return fromio.ChainFirstIOK(
Chain[A, A],
Map[B, A],
@@ -252,11 +253,11 @@ func ChainFirstIOK[A, B any](f func(A) IO[B]) func(IOOption[A]) IOOption[A] {
}
// Delay creates an operation that passes in the value after some delay
func Delay[A any](delay time.Duration) func(IOOption[A]) IOOption[A] {
func Delay[A any](delay time.Duration) Operator[A, A] {
return io.Delay[Option[A]](delay)
}
// After creates an operation that passes after the given [time.Time]
func After[A any](timestamp time.Time) func(IOOption[A]) IOOption[A] {
func After[A any](timestamp time.Time) Operator[A, A] {
return io.After[Option[A]](timestamp)
}
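
A small end-to-end sketch of the updated signatures (Kleisli, Operator, and the new Fold). The package import path github.com/IBM/fp-go/v2/iooption is assumed here:

```go
package main

import (
	"fmt"

	"github.com/IBM/fp-go/v2/function"
	"github.com/IBM/fp-go/v2/io"
	"github.com/IBM/fp-go/v2/iooption"
)

func main() {
	env := map[string]string{"HOME": "/home/alice"}

	// Kleisli[string, string]: a lookup that may not find a value
	lookup := iooption.Optionize1(func(key string) (string, bool) {
		v, ok := env[key]
		return v, ok
	})

	program := function.Pipe2(
		lookup("HOME"),
		iooption.Map(func(s string) int { return len(s) }), // Operator[string, int]
		iooption.Fold(io.Of(-1), io.Of[int]),               // onNone is a plain IO[int] now
	)

	fmt.Println(program()) // 11
}
```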

View File

@@ -19,7 +19,7 @@ import "github.com/IBM/fp-go/v2/function"
// WithResource constructs a function that creates a resource, then operates on it and then releases the resource
func WithResource[
R, A, ANY any](onCreate IOOption[R], onRelease func(R) IOOption[ANY]) Kleisli[Kleisli[R, A], A] {
R, A, ANY any](onCreate IOOption[R], onRelease Kleisli[R, ANY]) Kleisli[Kleisli[R, A], A] {
// simply map to implementation of bracket
return function.Bind13of3(Bracket[R, A, ANY])(onCreate, function.Ignore2of2[Option[A]](onRelease))
}

View File

@@ -265,5 +265,3 @@
// In practice, they are the same type, but the lazy package provides a more focused
// API for pure computations.
package lazy
// Made with Bob

View File

@@ -501,5 +501,3 @@ func TestMapComposition(t *testing.T) {
assert.Equal(t, 20, result())
}
// Made with Bob

View File

@@ -1,4 +1,234 @@
# Optics
Refer to [Introduction to optics: lenses and prisms](https://medium.com/@gcanti/introduction-to-optics-lenses-and-prisms-3230e73bfcfe) for an introduction about functional optics.
Functional optics for composable data access and manipulation in Go.
## Overview
Optics are first-class, composable references to parts of data structures. They provide a uniform interface for reading, writing, and transforming nested immutable data without verbose boilerplate code.
## Quick Start
```go
import (
"github.com/IBM/fp-go/v2/optics/lens"
F "github.com/IBM/fp-go/v2/function"
)
type Person struct {
Name string
Age int
}
// Create a lens for the Name field
nameLens := lens.MakeLens(
func(p Person) string { return p.Name },
func(p Person, name string) Person {
p.Name = name
return p
},
)
person := Person{Name: "Alice", Age: 30}
// Get the name
name := nameLens.Get(person) // "Alice"
// Set a new name (returns a new Person)
updated := nameLens.Set("Bob")(person)
// person.Name is still "Alice", updated.Name is "Bob"
```
## Core Optics Types
### Lens - Product Types (Structs)
Focus on a single field within a struct. Provides get and set operations.
**Use when:** Working with struct fields that always exist.
```go
ageLens := lens.MakeLens(
func(p Person) int { return p.Age },
func(p Person, age int) Person {
p.Age = age
return p
},
)
```
### Prism - Sum Types (Variants)
Focus on one variant of a sum type. Provides optional get and definite set.
**Use when:** Working with Either, Result, or custom sum types.
```go
import "github.com/IBM/fp-go/v2/optics/prism"
successPrism := prism.MakePrism(
func(r Result) option.Option[int] {
if s, ok := r.(Success); ok {
return option.Some(s.Value)
}
return option.None[int]()
},
func(v int) Result { return Success{Value: v} },
)
```
### Iso - Isomorphisms
Bidirectional transformation between equivalent types with no information loss.
**Use when:** Converting between equivalent representations (e.g., Celsius ↔ Fahrenheit).
```go
import "github.com/IBM/fp-go/v2/optics/iso"
celsiusToFahrenheit := iso.MakeIso(
func(c float64) float64 { return c*9/5 + 32 },
func(f float64) float64 { return (f - 32) * 5 / 9 },
)
```
### Optional - Maybe Values
Focus on a value that may or may not exist.
**Use when:** Working with nullable fields or values that may be absent.
```go
import "github.com/IBM/fp-go/v2/optics/optional"
timeoutOptional := optional.MakeOptional(
func(c Config) option.Option[*int] {
return option.FromNillable(c.Timeout)
},
func(c Config, t *int) Config {
c.Timeout = t
return c
},
)
```
### Traversal - Multiple Values
Focus on multiple values simultaneously, allowing batch operations.
**Use when:** Working with collections or updating multiple fields at once.
```go
import (
"github.com/IBM/fp-go/v2/optics/traversal"
TA "github.com/IBM/fp-go/v2/optics/traversal/array"
)
numbers := []int{1, 2, 3, 4, 5}
// Double all elements
doubled := F.Pipe2(
numbers,
TA.Traversal[int](),
traversal.Modify[[]int, int](func(n int) int { return n * 2 }),
)
// Result: [2, 4, 6, 8, 10]
```
## Composition
The real power of optics comes from composition:
```go
type Company struct {
Name string
Address Address
}
type Address struct {
Street string
City string
}
// Individual lenses
addressLens := lens.MakeLens(
func(c Company) Address { return c.Address },
func(c Company, a Address) Company {
c.Address = a
return c
},
)
cityLens := lens.MakeLens(
func(a Address) string { return a.City },
func(a Address, city string) Address {
a.City = city
return a
},
)
// Compose to access city directly from company
companyCityLens := F.Pipe1(
addressLens,
lens.Compose[Company](cityLens),
)
company := Company{
Name: "Acme Corp",
Address: Address{Street: "Main St", City: "NYC"},
}
city := companyCityLens.Get(company) // "NYC"
updated := companyCityLens.Set("Boston")(company)
```
## Optics Hierarchy
```
Iso[S, A]
├── Lens[S, A]
│   └── Optional[S, A]
│       └── Traversal[S, A]
└── Prism[S, A]
    └── Optional[S, A]
        └── Traversal[S, A]
```
More specific optics can be converted to more general ones.
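For example, an isomorphism can be reused wherever a lens is expected via the `optics/iso/lens` conversion helpers (a minimal sketch; the types and values are illustrative):

```go
import (
	"fmt"

	"github.com/IBM/fp-go/v2/optics/iso"
	isolens "github.com/IBM/fp-go/v2/optics/iso/lens"
)

type Celsius float64
type Kelvin float64

// Lossless bidirectional transformation
var celsiusKelvin = iso.MakeIso(
	func(c Celsius) Kelvin { return Kelvin(c + 273.15) },
	func(k Kelvin) Celsius { return Celsius(k - 273.15) },
)

// Reuse the isomorphism wherever a lens is expected
var kelvinLens = isolens.IsoAsLens(celsiusKelvin)

func Example() {
	fmt.Println(kelvinLens.Get(Celsius(20)))              // 293.15
	fmt.Println(kelvinLens.Set(Kelvin(300))(Celsius(20))) // 26.85 (approximately)
}
```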
## Package Structure
- **optics/lens**: Lenses for product types (structs)
- **optics/prism**: Prisms for sum types (Either, Result, etc.)
- **optics/iso**: Isomorphisms for equivalent types
- **optics/optional**: Optional optics for maybe values
- **optics/traversal**: Traversals for multiple values
Each package includes specialized sub-packages for common patterns:
- **array**: Optics for arrays/slices
- **either**: Optics for Either types
- **option**: Optics for Option types
- **record**: Optics for maps
## Documentation
For detailed documentation on each optic type, see:
- [Main Package Documentation](https://pkg.go.dev/github.com/IBM/fp-go/v2/optics)
- [Lens Documentation](https://pkg.go.dev/github.com/IBM/fp-go/v2/optics/lens)
- [Prism Documentation](https://pkg.go.dev/github.com/IBM/fp-go/v2/optics/prism)
- [Iso Documentation](https://pkg.go.dev/github.com/IBM/fp-go/v2/optics/iso)
- [Optional Documentation](https://pkg.go.dev/github.com/IBM/fp-go/v2/optics/optional)
- [Traversal Documentation](https://pkg.go.dev/github.com/IBM/fp-go/v2/optics/traversal)
## Further Reading
For an introduction to functional optics concepts:
- [Introduction to optics: lenses and prisms](https://medium.com/@gcanti/introduction-to-optics-lenses-and-prisms-3230e73bfcfe) by Giulio Canti
## Examples
See the [samples/lens](../samples/lens) directory for complete working examples.
## License
Apache License 2.0 - See LICENSE file for details.

v2/optics/iso/lens/doc.go (new file, 231 lines)
View File

@@ -0,0 +1,231 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package lens provides conversions from isomorphisms to lenses.
# Overview
This package bridges the gap between isomorphisms (bidirectional transformations)
and lenses (focused accessors). Since every isomorphism can be viewed as a lens,
this package provides functions to perform that conversion.
An isomorphism Iso[S, A] represents a lossless bidirectional transformation between
types S and A. A lens Lens[S, A] provides focused access to a part A within a
structure S. Since an isomorphism can transform the entire structure S to A and back,
it naturally forms a lens that focuses on the "whole as a part".
# Mathematical Foundation
Given an Iso[S, A] with:
- Get: S → A (forward transformation)
- ReverseGet: A → S (reverse transformation)
We can construct a Lens[S, A] with:
- Get: S → A (same as iso's Get)
- Set: A → S → S (implemented as: a => s => ReverseGet(a))
The lens laws are automatically satisfied because the isomorphism laws guarantee:
1. GetSet: Set(Get(s))(s) == s (from iso's round-trip law)
2. SetGet: Get(Set(a)(s)) == a (from iso's inverse law)
3. SetSet: Set(a2)(Set(a1)(s)) == Set(a2)(s) (trivially true)
# Basic Usage
Converting an isomorphism to a lens:
type Celsius float64
type Kelvin float64
// Create an isomorphism between Celsius and Kelvin
celsiusKelvinIso := iso.MakeIso(
func(c Celsius) Kelvin { return Kelvin(c + 273.15) },
func(k Kelvin) Celsius { return Celsius(k - 273.15) },
)
// Convert to a lens
celsiusKelvinLens := lens.IsoAsLens(celsiusKelvinIso)
// Use as a lens
celsius := Celsius(20.0)
kelvin := celsiusKelvinLens.Get(celsius) // 293.15 K
updated := celsiusKelvinLens.Set(Kelvin(300))(celsius) // 26.85°C
# Working with Pointers
For pointer-based structures, use IsoAsLensRef:
type UserId int
type User struct {
id UserId
name string
}
// Isomorphism between User pointer and UserId
userIdIso := iso.MakeIso(
func(u *User) UserId { return u.id },
func(id UserId) *User { return &User{id: id, name: "Unknown"} },
)
// Convert to a reference lens
userIdLens := lens.IsoAsLensRef(userIdIso)
user := &User{id: 42, name: "Alice"}
id := userIdLens.Get(user) // 42
updated := userIdLens.Set(UserId(100))(user) // New user with id 100
# Use Cases
1. Type Wrappers: Convert between newtype wrappers and their underlying types
type Email string
type ValidatedEmail struct{ value Email }
emailIso := iso.MakeIso(
func(ve ValidatedEmail) Email { return ve.value },
func(e Email) ValidatedEmail { return ValidatedEmail{value: e} },
)
emailLens := lens.IsoAsLens(emailIso)
2. Unit Conversions: Work with different units of measurement
type Meters float64
type Feet float64
metersFeetIso := iso.MakeIso(
func(m Meters) Feet { return Feet(m * 3.28084) },
func(f Feet) Meters { return Meters(f / 3.28084) },
)
distanceLens := lens.IsoAsLens(metersFeetIso)
3. Encoding/Decoding: Transform between different representations
type JSON string
type Config struct {
Host string
Port int
}
// Assuming encode/decode functions exist
configIso := iso.MakeIso(encode, decode)
configLens := lens.IsoAsLens(configIso)
# Composition
Lenses created from isomorphisms can be composed with other lenses:
type Temperature struct {
celsius Celsius
}
// Lens to access celsius field
celsiusFieldLens := L.MakeLens(
func(t Temperature) Celsius { return t.celsius },
func(t Temperature, c Celsius) Temperature {
t.celsius = c
return t
},
)
// Compose with iso-based lens to work with Kelvin
tempKelvinLens := F.Pipe1(
celsiusFieldLens,
L.Compose[Temperature](celsiusKelvinLens),
)
temp := Temperature{celsius: 20}
kelvin := tempKelvinLens.Get(temp) // 293.15 K
updated := tempKelvinLens.Set(Kelvin(300))(temp) // 26.85°C
# Comparison with Direct Lenses
While you can create a lens directly, using an isomorphism provides benefits:
1. Reusability: The isomorphism can be used in multiple contexts
2. Bidirectionality: The inverse transformation is explicitly available
3. Type Safety: Isomorphism laws ensure correctness
4. Composability: Isomorphisms compose naturally
Direct lens approach requires defining both get and set operations separately,
while the isomorphism approach defines the bidirectional transformation once
and converts it to a lens when needed.
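
For the temperature example above, the two approaches compare roughly as follows (sketch):

	// Direct lens: Get and Set written by hand against the structure
	kelvinLens := L.MakeLens(
		func(c Celsius) Kelvin { return Kelvin(c + 273.15) },
		func(_ Celsius, k Kelvin) Celsius { return Celsius(k - 273.15) },
	)

	// Iso-based lens: define the bidirectional transformation once, convert when needed
	kelvinLensFromIso := lens.IsoAsLens(celsiusKelvinIso)

Both yield a Lens[Celsius, Kelvin]; the iso-based variant additionally keeps the reverse
transformation available for other uses.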
# Performance Considerations
Converting an isomorphism to a lens has minimal overhead. The resulting lens
simply delegates to the isomorphism's Get and ReverseGet functions. However,
keep in mind:
1. Each Set operation performs a full transformation via ReverseGet
2. For pointer types, use IsoAsLensRef to ensure proper copying
3. The lens ignores the original structure in Set, using only the new value
# Function Reference
Conversion Functions:
- IsoAsLens: Convert Iso[S, A] to Lens[S, A] for value types
- IsoAsLensRef: Convert Iso[*S, A] to Lens[*S, A] for pointer types
# Related Packages
- github.com/IBM/fp-go/v2/optics/iso: Isomorphisms (bidirectional transformations)
- github.com/IBM/fp-go/v2/optics/lens: Lenses (focused accessors)
- github.com/IBM/fp-go/v2/optics/lens/iso: Convert lenses to isomorphisms (inverse operation)
- github.com/IBM/fp-go/v2/endomorphism: Endomorphisms (A → A functions)
- github.com/IBM/fp-go/v2/function: Function composition utilities
# Examples
Complete example with type wrappers:
type UserId int
type Username string
type User struct {
id UserId
name Username
}
// Isomorphism for UserId
userIdIso := iso.MakeIso(
func(u User) UserId { return u.id },
func(id UserId) User { return User{id: id, name: "Unknown"} },
)
// Isomorphism for Username
usernameIso := iso.MakeIso(
func(u User) Username { return u.name },
func(name Username) User { return User{id: 0, name: name} },
)
// Convert to lenses
idLens := lens.IsoAsLens(userIdIso)
nameLens := lens.IsoAsLens(usernameIso)
user := User{id: 42, name: "Alice"}
// Access and modify through lenses
id := idLens.Get(user) // 42
name := nameLens.Get(user) // "Alice"
renamed := nameLens.Set("Bob")(user) // User{id: 0, name: "Bob"}
reidentified := idLens.Set(UserId(100))(user) // User{id: 100, name: "Unknown"}
Note: When using Set with iso-based lenses, the entire structure is replaced
via ReverseGet, so other fields may be reset to default values. For partial
updates, use regular lenses instead.
*/
package lens

View File

@@ -18,16 +18,15 @@ package lens
import (
EM "github.com/IBM/fp-go/v2/endomorphism"
F "github.com/IBM/fp-go/v2/function"
I "github.com/IBM/fp-go/v2/optics/iso"
L "github.com/IBM/fp-go/v2/optics/lens"
)
// IsoAsLens converts an `Iso` to a `Lens`
func IsoAsLens[S, A any](sa I.Iso[S, A]) L.Lens[S, A] {
func IsoAsLens[S, A any](sa Iso[S, A]) Lens[S, A] {
return L.MakeLensCurried(sa.Get, F.Flow2(sa.ReverseGet, F.Flow2(F.Constant1[S, S], EM.Of[func(S) S])))
}
// IsoAsLensRef converts an `Iso` to a `Lens`
func IsoAsLensRef[S, A any](sa I.Iso[*S, A]) L.Lens[*S, A] {
func IsoAsLensRef[S, A any](sa Iso[*S, A]) Lens[*S, A] {
return L.MakeLensRefCurried(sa.Get, F.Flow2(sa.ReverseGet, F.Flow2(F.Constant1[*S, *S], EM.Of[func(*S) *S])))
}

View File

@@ -0,0 +1,399 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package lens
import (
"testing"
F "github.com/IBM/fp-go/v2/function"
ISO "github.com/IBM/fp-go/v2/optics/iso"
L "github.com/IBM/fp-go/v2/optics/lens"
"github.com/stretchr/testify/assert"
)
// Test types
type Celsius float64
type Fahrenheit float64
type UserId int
type User struct {
id UserId
name string
}
type Meters float64
type Feet float64
// TestIsoAsLensBasic tests basic functionality of IsoAsLens
func TestIsoAsLensBasic(t *testing.T) {
// Create an isomorphism between Celsius and Fahrenheit
celsiusToFahrenheit := func(c Celsius) Fahrenheit {
return Fahrenheit(c*9/5 + 32)
}
fahrenheitToCelsius := func(f Fahrenheit) Celsius {
return Celsius((f - 32) * 5 / 9)
}
tempIso := ISO.MakeIso(celsiusToFahrenheit, fahrenheitToCelsius)
tempLens := IsoAsLens(tempIso)
t.Run("Get", func(t *testing.T) {
celsius := Celsius(20.0)
fahrenheit := tempLens.Get(celsius)
assert.InDelta(t, 68.0, float64(fahrenheit), 0.001)
})
t.Run("Set", func(t *testing.T) {
celsius := Celsius(20.0)
newFahrenheit := Fahrenheit(86.0)
updated := tempLens.Set(newFahrenheit)(celsius)
assert.InDelta(t, 30.0, float64(updated), 0.001)
})
t.Run("SetPreservesOriginal", func(t *testing.T) {
original := Celsius(20.0)
newFahrenheit := Fahrenheit(86.0)
_ = tempLens.Set(newFahrenheit)(original)
// Original should be unchanged
assert.Equal(t, Celsius(20.0), original)
})
}
// TestIsoAsLensRefBasic tests basic functionality of IsoAsLensRef
func TestIsoAsLensRefBasic(t *testing.T) {
// Create an isomorphism for User pointer and UserId
userToId := func(u *User) UserId {
return u.id
}
idToUser := func(id UserId) *User {
return &User{id: id, name: "Unknown"}
}
userIdIso := ISO.MakeIso(userToId, idToUser)
userIdLens := IsoAsLensRef(userIdIso)
t.Run("Get", func(t *testing.T) {
user := &User{id: 42, name: "Alice"}
id := userIdLens.Get(user)
assert.Equal(t, UserId(42), id)
})
t.Run("Set", func(t *testing.T) {
user := &User{id: 42, name: "Alice"}
newId := UserId(100)
updated := userIdLens.Set(newId)(user)
assert.Equal(t, UserId(100), updated.id)
assert.Equal(t, "Unknown", updated.name) // ReverseGet creates new user
})
t.Run("SetCreatesNewPointer", func(t *testing.T) {
user := &User{id: 42, name: "Alice"}
newId := UserId(100)
updated := userIdLens.Set(newId)(user)
// Should be different pointers
assert.NotSame(t, user, updated)
// Original should be unchanged
assert.Equal(t, UserId(42), user.id)
assert.Equal(t, "Alice", user.name)
})
}
// TestIsoAsLensLaws verifies that IsoAsLens satisfies lens laws
func TestIsoAsLensLaws(t *testing.T) {
// Create a simple isomorphism
type Wrapper struct{ value int }
wrapperIso := ISO.MakeIso(
func(w Wrapper) int { return w.value },
func(i int) Wrapper { return Wrapper{value: i} },
)
lens := IsoAsLens(wrapperIso)
wrapper := Wrapper{value: 42}
newValue := 100
// Law 1: GetSet - lens.Set(lens.Get(s))(s) == s
t.Run("GetSetLaw", func(t *testing.T) {
result := lens.Set(lens.Get(wrapper))(wrapper)
assert.Equal(t, wrapper, result)
})
// Law 2: SetGet - lens.Get(lens.Set(a)(s)) == a
t.Run("SetGetLaw", func(t *testing.T) {
result := lens.Get(lens.Set(newValue)(wrapper))
assert.Equal(t, newValue, result)
})
// Law 3: SetSet - lens.Set(a2)(lens.Set(a1)(s)) == lens.Set(a2)(s)
t.Run("SetSetLaw", func(t *testing.T) {
result1 := lens.Set(200)(lens.Set(newValue)(wrapper))
result2 := lens.Set(200)(wrapper)
assert.Equal(t, result2, result1)
})
}
// TestIsoAsLensRefLaws verifies that IsoAsLensRef satisfies lens laws
func TestIsoAsLensRefLaws(t *testing.T) {
type Wrapper struct{ value int }
wrapperIso := ISO.MakeIso(
func(w *Wrapper) int { return w.value },
func(i int) *Wrapper { return &Wrapper{value: i} },
)
lens := IsoAsLensRef(wrapperIso)
wrapper := &Wrapper{value: 42}
newValue := 100
// Law 1: GetSet - lens.Set(lens.Get(s))(s) == s
t.Run("GetSetLaw", func(t *testing.T) {
result := lens.Set(lens.Get(wrapper))(wrapper)
assert.Equal(t, wrapper.value, result.value)
})
// Law 2: SetGet - lens.Get(lens.Set(a)(s)) == a
t.Run("SetGetLaw", func(t *testing.T) {
result := lens.Get(lens.Set(newValue)(wrapper))
assert.Equal(t, newValue, result)
})
// Law 3: SetSet - lens.Set(a2)(lens.Set(a1)(s)) == lens.Set(a2)(s)
t.Run("SetSetLaw", func(t *testing.T) {
result1 := lens.Set(200)(lens.Set(newValue)(wrapper))
result2 := lens.Set(200)(wrapper)
assert.Equal(t, result2.value, result1.value)
})
}
// TestIsoAsLensComposition tests composing iso-based lenses with other lenses
func TestIsoAsLensComposition(t *testing.T) {
type Temperature struct {
celsius Celsius
}
// Lens to access celsius field
celsiusFieldLens := L.MakeLens(
func(t Temperature) Celsius { return t.celsius },
func(t Temperature, c Celsius) Temperature {
t.celsius = c
return t
},
)
// Isomorphism between Celsius and Fahrenheit
celsiusToFahrenheit := func(c Celsius) Fahrenheit {
return Fahrenheit(c*9/5 + 32)
}
fahrenheitToCelsius := func(f Fahrenheit) Celsius {
return Celsius((f - 32) * 5 / 9)
}
tempIso := ISO.MakeIso(celsiusToFahrenheit, fahrenheitToCelsius)
tempLens := IsoAsLens(tempIso)
// Compose to work with Fahrenheit directly from Temperature
composedLens := F.Pipe1(
celsiusFieldLens,
L.Compose[Temperature](tempLens),
)
temp := Temperature{celsius: 20}
t.Run("ComposedGet", func(t *testing.T) {
fahrenheit := composedLens.Get(temp)
assert.InDelta(t, 68.0, float64(fahrenheit), 0.001)
})
t.Run("ComposedSet", func(t *testing.T) {
newFahrenheit := Fahrenheit(86.0)
updated := composedLens.Set(newFahrenheit)(temp)
assert.InDelta(t, 30.0, float64(updated.celsius), 0.001)
})
}
// TestIsoAsLensModify tests using Modify with iso-based lenses
func TestIsoAsLensModify(t *testing.T) {
// Isomorphism between Meters and Feet
metersToFeet := func(m Meters) Feet {
return Feet(m * 3.28084)
}
feetToMeters := func(f Feet) Meters {
return Meters(f / 3.28084)
}
distanceIso := ISO.MakeIso(metersToFeet, feetToMeters)
distanceLens := IsoAsLens(distanceIso)
meters := Meters(10.0)
t.Run("ModifyDouble", func(t *testing.T) {
// Double the distance in feet, result in meters
doubleFeet := func(f Feet) Feet { return f * 2 }
modified := L.Modify[Meters](doubleFeet)(distanceLens)(meters)
assert.InDelta(t, 20.0, float64(modified), 0.001)
})
t.Run("ModifyIdentity", func(t *testing.T) {
// Identity modification should return same value
identity := func(f Feet) Feet { return f }
modified := L.Modify[Meters](identity)(distanceLens)(meters)
assert.InDelta(t, float64(meters), float64(modified), 0.001)
})
}
// TestIsoAsLensWithIdentityIso tests that identity iso creates identity lens
func TestIsoAsLensWithIdentityIso(t *testing.T) {
type Value int
idIso := ISO.Id[Value]()
idLens := IsoAsLens(idIso)
value := Value(42)
t.Run("IdentityGet", func(t *testing.T) {
result := idLens.Get(value)
assert.Equal(t, value, result)
})
t.Run("IdentitySet", func(t *testing.T) {
newValue := Value(100)
result := idLens.Set(newValue)(value)
assert.Equal(t, newValue, result)
})
}
// TestIsoAsLensRefWithIdentityIso tests identity iso with references
func TestIsoAsLensRefWithIdentityIso(t *testing.T) {
type Value struct{ n int }
idIso := ISO.Id[*Value]()
idLens := IsoAsLensRef(idIso)
value := &Value{n: 42}
t.Run("IdentityGet", func(t *testing.T) {
result := idLens.Get(value)
assert.Equal(t, value, result)
})
t.Run("IdentitySet", func(t *testing.T) {
newValue := &Value{n: 100}
result := idLens.Set(newValue)(value)
assert.Equal(t, newValue, result)
})
}
// TestIsoAsLensRoundTrip tests round-trip conversions
func TestIsoAsLensRoundTrip(t *testing.T) {
type Email string
type ValidatedEmail struct{ value Email }
emailIso := ISO.MakeIso(
func(ve ValidatedEmail) Email { return ve.value },
func(e Email) ValidatedEmail { return ValidatedEmail{value: e} },
)
emailLens := IsoAsLens(emailIso)
validated := ValidatedEmail{value: "user@example.com"}
t.Run("RoundTripThroughGet", func(t *testing.T) {
// Get the email, then Set it back
email := emailLens.Get(validated)
restored := emailLens.Set(email)(validated)
assert.Equal(t, validated, restored)
})
t.Run("RoundTripThroughSet", func(t *testing.T) {
// Set a new email, then Get it
newEmail := Email("admin@example.com")
updated := emailLens.Set(newEmail)(validated)
retrieved := emailLens.Get(updated)
assert.Equal(t, newEmail, retrieved)
})
}
// TestIsoAsLensWithComplexTypes tests with more complex type transformations
func TestIsoAsLensWithComplexTypes(t *testing.T) {
type Point struct {
x, y float64
}
type PolarCoord struct {
r, theta float64
}
// Isomorphism between Cartesian and Polar coordinates (simplified for testing)
cartesianToPolar := func(p Point) PolarCoord {
r := p.x*p.x + p.y*p.y
theta := 0.0 // Simplified
return PolarCoord{r: r, theta: theta}
}
polarToCartesian := func(pc PolarCoord) Point {
return Point{x: pc.r, y: pc.theta} // Simplified
}
coordIso := ISO.MakeIso(cartesianToPolar, polarToCartesian)
coordLens := IsoAsLens(coordIso)
point := Point{x: 3.0, y: 4.0}
t.Run("ComplexGet", func(t *testing.T) {
polar := coordLens.Get(point)
assert.NotNil(t, polar)
})
t.Run("ComplexSet", func(t *testing.T) {
newPolar := PolarCoord{r: 5.0, theta: 0.927}
updated := coordLens.Set(newPolar)(point)
assert.NotNil(t, updated)
})
}
// TestIsoAsLensTypeConversion tests type conversion scenarios
func TestIsoAsLensTypeConversion(t *testing.T) {
type StringWrapper string
type IntWrapper int
// Isomorphism that converts string length to int
strLenIso := ISO.MakeIso(
func(s StringWrapper) IntWrapper { return IntWrapper(len(s)) },
func(i IntWrapper) StringWrapper {
// Create a string of given length (simplified)
result := ""
for j := 0; j < int(i); j++ {
result += "x"
}
return StringWrapper(result)
},
)
strLenLens := IsoAsLens(strLenIso)
t.Run("StringToLength", func(t *testing.T) {
str := StringWrapper("hello")
length := strLenLens.Get(str)
assert.Equal(t, IntWrapper(5), length)
})
t.Run("LengthToString", func(t *testing.T) {
str := StringWrapper("hello")
newLength := IntWrapper(3)
updated := strLenLens.Set(newLength)(str)
assert.Equal(t, 3, len(updated))
})
}


@@ -0,0 +1,11 @@
package lens
import (
"github.com/IBM/fp-go/v2/optics/iso"
L "github.com/IBM/fp-go/v2/optics/lens"
)
type (
Lens[S, A any] = L.Lens[S, A]
Iso[S, A any] = iso.Iso[S, A]
)

v2/optics/iso/option/doc.go Normal file

@@ -0,0 +1,303 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package option provides isomorphisms for working with Option types.
# Overview
This package offers utilities to convert between regular values and Option-wrapped values,
particularly useful for handling zero values and optional data. It provides isomorphisms
that treat certain values (like zero values) as representing absence, mapping them to None,
while other values map to Some.
# Core Functionality
The main function in this package is FromZero, which creates an isomorphism between a
comparable type T and Option[T], treating the zero value as None.
# FromZero Isomorphism
FromZero creates a bidirectional transformation where:
- Forward (Get): T → Option[T]
- Zero value → None
- Non-zero value → Some(value)
- Reverse (ReverseGet): Option[T] → T
- None → Zero value
- Some(value) → value
# Basic Usage
Working with integers:
import (
"github.com/IBM/fp-go/v2/optics/iso/option"
O "github.com/IBM/fp-go/v2/option"
)
isoInt := option.FromZero[int]()
// Convert zero to None
opt := isoInt.Get(0) // None[int]
// Convert non-zero to Some
opt = isoInt.Get(42) // Some(42)
// Convert None to zero
val := isoInt.ReverseGet(O.None[int]()) // 0
// Convert Some to value
val = isoInt.ReverseGet(O.Some(42)) // 42
# Use Cases
## Database Nullable Columns
Convert between database NULL and Go zero values:
type User struct {
ID int
Name string
Age *int // NULL in database
Email *string
}
ageIso := option.FromZero[*int]()
// Reading from database
var dbAge *int = nil
optAge := ageIso.Get(dbAge) // None[*int]
// Writing to database
userAge := 25
dbAge = ageIso.ReverseGet(O.Some(&userAge)) // &25
## Configuration with Defaults
Handle optional configuration values:
type Config struct {
Port int
Timeout int
MaxConn int
}
portIso := option.FromZero[int]()
// Use zero as "not configured"
config := Config{Port: 0, Timeout: 30, MaxConn: 100}
portOpt := portIso.Get(config.Port) // None[int] (use default)
// Set explicit value
config.Port = portIso.ReverseGet(O.Some(8080)) // 8080
## API Response Handling
Work with APIs that use zero values to indicate absence:
type APIResponse struct {
UserID int // 0 means not set
Score float64 // 0.0 means not available
Message string // "" means no message
}
userIDIso := option.FromZero[int]()
scoreIso := option.FromZero[float64]()
messageIso := option.FromZero[string]()
response := APIResponse{UserID: 0, Score: 0.0, Message: ""}
userID := userIDIso.Get(response.UserID) // None[int]
score := scoreIso.Get(response.Score) // None[float64]
message := messageIso.Get(response.Message) // None[string]
## Validation Logic
Simplify required vs optional field validation:
type FormData struct {
Name string // Required
Email string // Required
Phone string // Optional (empty = not provided)
Comments string // Optional
}
phoneIso := option.FromZero[string]()
commentsIso := option.FromZero[string]()
form := FormData{
Name: "Alice",
Email: "alice@example.com",
Phone: "",
Comments: "",
}
// Check optional fields
phone := phoneIso.Get(form.Phone) // None[string]
comments := commentsIso.Get(form.Comments) // None[string]
// Validate: required fields must be non-empty
if form.Name == "" || form.Email == "" {
// Validation error
}
# Working with Different Types
## Strings
strIso := option.FromZero[string]()
opt := strIso.Get("") // None[string]
opt = strIso.Get("hello") // Some("hello")
val := strIso.ReverseGet(O.None[string]()) // ""
val = strIso.ReverseGet(O.Some("world")) // "world"
## Pointers
ptrIso := option.FromZero[*int]()
opt := ptrIso.Get(nil) // None[*int]
num := 42
opt = ptrIso.Get(&num) // Some(&num)
val := ptrIso.ReverseGet(O.None[*int]()) // nil
val = ptrIso.ReverseGet(O.Some(&num)) // &num
## Floating Point Numbers
floatIso := option.FromZero[float64]()
opt := floatIso.Get(0.0) // None[float64]
opt = floatIso.Get(3.14) // Some(3.14)
val := floatIso.ReverseGet(O.None[float64]()) // 0.0
val = floatIso.ReverseGet(O.Some(2.71)) // 2.71
## Booleans
boolIso := option.FromZero[bool]()
opt := boolIso.Get(false) // None[bool]
opt = boolIso.Get(true) // Some(true)
val := boolIso.ReverseGet(O.None[bool]()) // false
val = boolIso.ReverseGet(O.Some(true)) // true
# Composition with Other Optics
Combine with lenses for nested structures:
import (
L "github.com/IBM/fp-go/v2/optics/lens"
I "github.com/IBM/fp-go/v2/optics/iso"
)
type Settings struct {
Volume int // 0 means muted
}
volumeLens := L.MakeLens(
func(s Settings) int { return s.Volume },
func(s Settings, v int) Settings {
s.Volume = v
return s
},
)
volumeIso := option.FromZero[int]()
// Compose lens with iso
volumeOptLens := F.Pipe1(
volumeLens,
L.IMap[Settings](volumeIso.Get, volumeIso.ReverseGet),
)
settings := Settings{Volume: 0}
vol := volumeOptLens.Get(settings) // None[int] (muted)
// Set volume
updated := volumeOptLens.Set(O.Some(75))(settings)
// updated.Volume == 75
# Isomorphism Laws
FromZero satisfies the isomorphism round-trip laws:
1. **ReverseGet(Get(t)) == t** for all t: T
isoInt := option.FromZero[int]()
value := 42
result := isoInt.ReverseGet(isoInt.Get(value))
// result == 42
2. **Get(ReverseGet(opt)) == opt** for all opt: Option[T]
isoInt := option.FromZero[int]()
opt := O.Some(42)
result := isoInt.Get(isoInt.ReverseGet(opt))
// result == Some(42)
These laws ensure that the transformation is truly reversible with no information loss.
# Performance Considerations
The FromZero isomorphism is very efficient:
- No allocations for the iso structure itself
- Simple equality comparison for zero check
- Direct value unwrapping for ReverseGet
- No reflection or runtime type assertions
# Type Safety
The isomorphism is fully type-safe:
- Compile-time type checking ensures T is comparable
- Generic type parameters prevent type mismatches
- No runtime type assertions needed
- The compiler enforces correct usage
# Limitations
The FromZero isomorphism has some limitations to be aware of:
1. **Zero Value Ambiguity**: Cannot distinguish between "intentionally zero" and "absent"
- For int: 0 always maps to None, even if 0 is a valid value
- For string: "" always maps to None, even if empty string is valid
- Solution: Use a different representation (e.g., pointers) if zero is meaningful; see the sketch below
2. **Comparable Constraint**: Only works with comparable types
- Cannot use with slices, maps, or functions
- Cannot use with structs containing non-comparable fields
- Solution: Use pointers to such types, or custom isomorphisms
3. **Boolean Limitation**: false always maps to None
- Cannot represent "explicitly false" vs "not set"
- Solution: Use *bool or a custom type if this distinction matters
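As a minimal sketch of the pointer workaround for limitation 1 (the Form type and its field are illustrative; only FromZero as documented above is used):

type Form struct {
Quantity *int // nil = not provided, &0 = explicitly zero
}

qtyIso := option.FromZero[*int]()

zero := 0
provided := qtyIso.Get(&zero) // Some(&zero): an explicit zero is preserved
missing := qtyIso.Get(nil)    // None: the value was never provided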
# Related Packages
- github.com/IBM/fp-go/v2/optics/iso: Core isomorphism functionality
- github.com/IBM/fp-go/v2/option: Option type and operations
- github.com/IBM/fp-go/v2/optics/lens: Lenses for focused access
- github.com/IBM/fp-go/v2/optics/lens/option: Lenses for optional values
# See Also
For more information on isomorphisms and optics:
- optics/iso package documentation
- optics package overview
- option package documentation
*/
package option


@@ -453,6 +453,8 @@ Core Lens Creation:
- MakeLensCurried: Create a lens with curried setter
- MakeLensRef: Create a lens for pointer-based structures
- MakeLensRefCurried: Create a lens for pointers with curried setter
- MakeLensWithEq: Create a lens with equality optimization for pointer structures (see the sketch after this list)
- MakeLensStrict: Create a lens with strict equality optimization for pointer structures
- Id: Create an identity lens
- IdRef: Create an identity lens for pointers
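A brief sketch of the two new constructors, mirroring their use in the tests added in this changeset (the Street type, its fields, and the EQ import alias are taken from those tests and are illustrative here):

nameStrict := MakeLensStrict(
func(s *Street) string { return s.name },
func(s *Street, name string) *Street { s.name = name; return s },
)

nameWithEq := MakeLensWithEq(
EQ.FromStrictEquals[string](),
func(s *Street) string { return s.name },
func(s *Street, name string) *Street { s.name = name; return s },
)

street := &Street{num: 1, name: "Main"}
same := nameStrict.Set("Main")(street)   // unchanged value: the tests assert the same pointer is returned
updated := nameWithEq.Set("Oak")(street) // new value: an updated *Street is returned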

v2/optics/lens/iso/doc.go Normal file

@@ -0,0 +1,364 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package iso provides utilities for composing lenses with isomorphisms.
# Overview
This package bridges lenses and isomorphisms, allowing you to transform the focus type
of a lens using an isomorphism. It provides functions to compose lenses with isomorphisms
and to create isomorphisms for common patterns like nullable pointers.
The key insight is that if you have a Lens[S, A] and an Iso[A, B], you can create a
Lens[S, B] by composing them. This allows you to work with transformed views of your
data without changing the underlying structure.
# Core Functions
## FromNillable
Creates an isomorphism between a nullable pointer and an Option type:
type Config struct {
Timeout *int
}
// Create isomorphism: *int ↔ Option[int]
timeoutIso := iso.FromNillable[int]()
// nil → None, &value → Some(value)
opt := timeoutIso.Get(nil) // None[int]
num := 42
opt = timeoutIso.Get(&num) // Some(42)
// None → nil, Some(value) → &value
ptr := timeoutIso.ReverseGet(O.None[int]()) // nil
ptr = timeoutIso.ReverseGet(O.Some(42)) // &42
## Compose
Composes a lens with an isomorphism to transform the focus type:
type Person struct {
Name string
Age int
}
type Celsius float64
type Fahrenheit float64
type Weather struct {
Temperature Celsius
}
// Lens to access temperature
tempLens := L.MakeLens(
func(w Weather) Celsius { return w.Temperature },
func(w Weather, t Celsius) Weather {
w.Temperature = t
return w
},
)
// Isomorphism: Celsius ↔ Fahrenheit
celsiusToFahrenheit := I.MakeIso(
func(c Celsius) Fahrenheit { return Fahrenheit(c*9/5 + 32) },
func(f Fahrenheit) Celsius { return Celsius((f - 32) * 5 / 9) },
)
// Compose to work with Fahrenheit
tempFahrenheitLens := F.Pipe1(
tempLens,
iso.Compose[Weather, Celsius, Fahrenheit](celsiusToFahrenheit),
)
weather := Weather{Temperature: 20} // 20°C
tempF := tempFahrenheitLens.Get(weather) // 68°F
updated := tempFahrenheitLens.Set(86)(weather) // Set to 86°F (30°C)
# Use Cases
## Working with Nullable Fields
Convert between nullable pointers and Option types:
type DatabaseConfig struct {
Host string
Port int
Username string
Password *string // Nullable
}
type AppConfig struct {
Database *DatabaseConfig
}
// Lens to database config
dbLens := L.MakeLens(
func(c AppConfig) *DatabaseConfig { return c.Database },
func(c AppConfig, db *DatabaseConfig) AppConfig {
c.Database = db
return c
},
)
// Isomorphism for nullable pointer
dbIso := iso.FromNillable[DatabaseConfig]()
// Compose to work with Option
dbOptLens := F.Pipe1(
dbLens,
iso.Compose[AppConfig, *DatabaseConfig, O.Option[DatabaseConfig]](dbIso),
)
config := AppConfig{Database: nil}
dbOpt := dbOptLens.Get(config) // None[DatabaseConfig]
// Set with Some
newDB := DatabaseConfig{Host: "localhost", Port: 5432}
updated := dbOptLens.Set(O.Some(newDB))(config)
## Unit Conversions
Work with different units of measurement:
type Distance struct {
Meters float64
}
type Kilometers float64
type Miles float64
// Lens to meters
metersLens := L.MakeLens(
func(d Distance) float64 { return d.Meters },
func(d Distance, m float64) Distance {
d.Meters = m
return d
},
)
// Isomorphism: meters ↔ kilometers
metersToKm := I.MakeIso(
func(m float64) Kilometers { return Kilometers(m / 1000) },
func(km Kilometers) float64 { return float64(km * 1000) },
)
// Compose to work with kilometers
kmLens := F.Pipe1(
metersLens,
iso.Compose[Distance, float64, Kilometers](metersToKm),
)
distance := Distance{Meters: 5000}
km := kmLens.Get(distance) // 5 km
updated := kmLens.Set(Kilometers(10))(distance) // 10000 meters
## Type Wrappers
Work with newtype wrappers:
type UserId int
type User struct {
ID UserId
Name string
}
// Lens to user ID
idLens := L.MakeLens(
func(u User) UserId { return u.ID },
func(u User, id UserId) User {
u.ID = id
return u
},
)
// Isomorphism: UserId ↔ int
userIdIso := I.MakeIso(
func(id UserId) int { return int(id) },
func(i int) UserId { return UserId(i) },
)
// Compose to work with raw int
idIntLens := F.Pipe1(
idLens,
iso.Compose[User, UserId, int](userIdIso),
)
user := User{ID: 42, Name: "Alice"}
rawId := idIntLens.Get(user) // 42 (int)
updated := idIntLens.Set(100)(user) // UserId(100)
## Nested Nullable Fields
Safely navigate through nullable nested structures:
type Address struct {
Street string
City string
}
type Person struct {
Name string
Address *Address
}
type Company struct {
Name string
CEO *Person
}
// Lens to CEO
ceoLens := L.MakeLens(
func(c Company) *Person { return c.CEO },
func(c Company, p *Person) Company {
c.CEO = p
return c
},
)
// Isomorphism for nullable person
personIso := iso.FromNillable[Person]()
// Compose to work with Option[Person]
ceoOptLens := F.Pipe1(
ceoLens,
iso.Compose[Company, *Person, O.Option[Person]](personIso),
)
company := Company{Name: "Acme Corp", CEO: nil}
ceo := ceoOptLens.Get(company) // None[Person]
// Set CEO
newCEO := Person{Name: "Alice", Address: nil}
updated := ceoOptLens.Set(O.Some(newCEO))(company)
# Composition Patterns
## Chaining Multiple Isomorphisms
type Meters float64
type Kilometers float64
type Miles float64
type Journey struct {
Distance Meters
}
// Lens to distance
distLens := L.MakeLens(
func(j Journey) Meters { return j.Distance },
func(j Journey, d Meters) Journey {
j.Distance = d
return j
},
)
// Isomorphisms
metersToKm := I.MakeIso(
func(m Meters) Kilometers { return Kilometers(m / 1000) },
func(km Kilometers) Meters { return Meters(km * 1000) },
)
kmToMiles := I.MakeIso(
func(km Kilometers) Miles { return Miles(km * 0.621371) },
func(mi Miles) Kilometers { return Kilometers(mi / 0.621371) },
)
// Compose lens with chained isomorphisms
milesLens := F.Pipe2(
distLens,
iso.Compose[Journey, Meters, Kilometers](metersToKm),
iso.Compose[Journey, Kilometers, Miles](kmToMiles),
)
journey := Journey{Distance: 5000} // 5000 meters
miles := milesLens.Get(journey) // ~3.11 miles
## Combining with Optional Lenses
type Config struct {
Database *DatabaseConfig
}
type DatabaseConfig struct {
Port int
}
// Lens to database (nullable)
dbLens := L.MakeLens(
func(c Config) *DatabaseConfig { return c.Database },
func(c Config, db *DatabaseConfig) Config {
c.Database = db
return c
},
)
// Convert to Option lens
dbIso := iso.FromNillable[DatabaseConfig]()
dbOptLens := F.Pipe1(
dbLens,
iso.Compose[Config, *DatabaseConfig, O.Option[DatabaseConfig]](dbIso),
)
// Now compose with lens to port
portLens := L.MakeLens(
func(db DatabaseConfig) int { return db.Port },
func(db DatabaseConfig, port int) DatabaseConfig {
db.Port = port
return db
},
)
// Use ComposeOption to handle the Option
defaultDB := DatabaseConfig{Port: 5432}
configPortLens := F.Pipe1(
dbOptLens,
L.ComposeOption[Config, int](defaultDB)(portLens),
)
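Reading and writing through the composed lens then follows the ComposeOption semantics (a brief sketch):

config := Config{Database: nil}
port := configPortLens.Get(config) // None[int]
withPort := configPortLens.Set(O.Some(3306))(config) // Database created from defaultDB, Port == 3306
cleared := configPortLens.Set(O.None[int]())(withPort) // Database removed again (nil)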
# Performance Considerations
Composing lenses with isomorphisms is efficient:
- No additional allocations beyond the lens and iso structures
- Composition creates function closures but is still performant
- The isomorphism transformations are applied on-demand
- Consider caching composed lenses for frequently used paths (see the sketch below)
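For example, a composed lens can be built once and reused (a sketch assuming tempLens and the Celsius↔Fahrenheit iso from the Compose example above are package-level values):

var tempFahrenheitLens = F.Pipe1(
tempLens,
iso.Compose[Weather, Celsius, Fahrenheit](celsiusToFahrenheit),
)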
# Type Safety
All operations are fully type-safe:
- Compile-time type checking ensures correct composition
- Generic type parameters prevent type mismatches
- No runtime type assertions needed
- The compiler enforces that isomorphisms are properly reversible
# Related Packages
- github.com/IBM/fp-go/v2/optics/lens: Core lens functionality
- github.com/IBM/fp-go/v2/optics/iso: Core isomorphism functionality
- github.com/IBM/fp-go/v2/optics/iso/lens: Convert isomorphisms to lenses
- github.com/IBM/fp-go/v2/option: Option type and operations
- github.com/IBM/fp-go/v2/function: Function composition utilities
# See Also
For more information on lenses and isomorphisms:
- optics/lens package documentation
- optics/iso package documentation
- optics package overview
*/
package iso


@@ -24,18 +24,18 @@ import (
)
// FromNillable converts a nillable value to an option and back
func FromNillable[T any]() I.Iso[*T, O.Option[T]] {
func FromNillable[T any]() Iso[*T, Option[T]] {
return I.MakeIso(F.Flow2(
O.FromPredicate(F.IsNonNil[T]),
O.Map(F.Deref[T]),
),
O.Fold(F.Constant((*T)(nil)), F.Ref[T]),
O.Fold(F.ConstNil[T], F.Ref[T]),
)
}
// Compose converts a lens focusing on a property of type `A` into a lens focusing on a property of type `B`;
// the transformation is performed via an `Iso[A, B]`
func Compose[S, A, B any](ab I.Iso[A, B]) func(sa L.Lens[S, A]) L.Lens[S, B] {
func Compose[S, A, B any](ab Iso[A, B]) Operator[S, A, B] {
return F.Pipe2(
ab,
IL.IsoAsLens[A, B],


@@ -0,0 +1,14 @@
package iso
import (
"github.com/IBM/fp-go/v2/optics/iso"
"github.com/IBM/fp-go/v2/optics/lens"
"github.com/IBM/fp-go/v2/option"
)
type (
Option[A any] = option.Option[A]
Iso[S, A any] = iso.Iso[S, A]
Lens[S, A any] = lens.Lens[S, A]
Operator[S, A, B any] = lens.Operator[S, A, B]
)


@@ -17,6 +17,7 @@
package lens
import (
"github.com/IBM/fp-go/v2/endomorphism"
EQ "github.com/IBM/fp-go/v2/eq"
F "github.com/IBM/fp-go/v2/function"
)
@@ -43,7 +44,7 @@ func setCopyWithEq[GET ~func(*S) A, SET ~func(*S, A) *S, S, A any](pred EQ.Eq[A]
// setCopyCurried wraps a setter for a pointer into a setter that first creates a copy before
// modifying that copy
func setCopyCurried[SET ~func(A) Endomorphism[*S], S, A any](setter SET) func(a A) Endomorphism[*S] {
func setCopyCurried[SET ~func(A) Endomorphism[*S], S, A any](setter SET) func(A) Endomorphism[*S] {
return func(a A) Endomorphism[*S] {
seta := setter(a)
return func(s *S) *S {
@@ -374,7 +375,7 @@ func IdRef[S any]() Lens[*S, *S] {
}
// compose combines two lenses and allows narrowing the focus down to a sub-lens
func compose[GET ~func(S) B, SET ~func(S, B) S, S, A, B any](creator func(get GET, set SET) Lens[S, B], ab Lens[A, B]) func(Lens[S, A]) Lens[S, B] {
func compose[GET ~func(S) B, SET ~func(B) func(S) S, S, A, B any](creator func(get GET, set SET) Lens[S, B], ab Lens[A, B]) Operator[S, A, B] {
abget := ab.Get
abset := ab.Set
return func(sa Lens[S, A]) Lens[S, B] {
@@ -382,8 +383,12 @@ func compose[GET ~func(S) B, SET ~func(S, B) S, S, A, B any](creator func(get GE
saset := sa.Set
return creator(
F.Flow2(saget, abget),
func(s S, b B) S {
return saset(abset(b)(saget(s)))(s)
func(b B) func(S) S {
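// Equivalent to the previous saset(abset(b)(saget(s)))(s): Join applies the
// composed function to the same s twice, i.e. Join(g)(s) == g(s)(s).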
return endomorphism.Join(F.Flow3(
saget,
abset(b),
saset,
))
},
)
}
@@ -435,8 +440,8 @@ func compose[GET ~func(S) B, SET ~func(S, B) S, S, A, B any](creator func(get GE
// person := Person{Name: "Alice", Address: Address{Street: "Main St"}}
// street := personStreetLens.Get(person) // "Main St"
// updated := personStreetLens.Set("Oak Ave")(person)
func Compose[S, A, B any](ab Lens[A, B]) func(Lens[S, A]) Lens[S, B] {
return compose(MakeLens[func(S) B, func(S, B) S], ab)
func Compose[S, A, B any](ab Lens[A, B]) Operator[S, A, B] {
return compose(MakeLensCurried[func(S) B, func(B) func(S) S], ab)
}
// ComposeRef combines two lenses for pointer-based structures.
@@ -477,12 +482,8 @@ func Compose[S, A, B any](ab Lens[A, B]) func(Lens[S, A]) Lens[S, B] {
// )
//
// personStreetLens := F.Pipe1(addressLens, lens.ComposeRef[Person](streetLens))
func ComposeRef[S, A, B any](ab Lens[A, B]) func(Lens[*S, A]) Lens[*S, B] {
return compose(MakeLensRef[func(*S) B, func(*S, B) *S], ab)
}
func modify[FCT ~func(A) A, S, A any](f FCT, sa Lens[S, A], s S) S {
return sa.Set(f(sa.Get(s)))(s)
func ComposeRef[S, A, B any](ab Lens[A, B]) Operator[*S, A, B] {
return compose(MakeLensRefCurried[S, B], ab)
}
// Modify transforms a value through a lens using a transformation F.
@@ -531,7 +532,13 @@ func modify[FCT ~func(A) A, S, A any](f FCT, sa Lens[S, A], s S) S {
// )
// // doubled.Value == 10
func Modify[S any, FCT ~func(A) A, A any](f FCT) func(Lens[S, A]) Endomorphism[S] {
return F.Curry3(modify[FCT, S, A])(f)
return func(la Lens[S, A]) Endomorphism[S] {
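// Read the focus with la.Get, transform it with f, and write it back with la.Set;
// endomorphism.Join collapses the resulting func(S) Endomorphism[S] into one Endomorphism[S].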
return endomorphism.Join(F.Flow3(
la.Get,
f,
la.Set,
))
}
}
// IMap transforms the focus type of a lens using an isomorphism.
@@ -585,8 +592,8 @@ func Modify[S any, FCT ~func(A) A, A any](f FCT) func(Lens[S, A]) Endomorphism[S
// weather := Weather{Temperature: 20} // 20°C
// tempF := tempFahrenheitLens.Get(weather) // 68°F
// updated := tempFahrenheitLens.Set(86)(weather) // Set to 86°F (30°C)
func IMap[E any, AB ~func(A) B, BA ~func(B) A, A, B any](ab AB, ba BA) func(Lens[E, A]) Lens[E, B] {
return func(ea Lens[E, A]) Lens[E, B] {
return Lens[E, B]{Get: F.Flow2(ea.Get, ab), Set: F.Flow2(ba, ea.Set)}
func IMap[S any, AB ~func(A) B, BA ~func(B) A, A, B any](ab AB, ba BA) Operator[S, A, B] {
return func(ea Lens[S, A]) Lens[S, B] {
return MakeLensCurried(F.Flow2(ea.Get, ab), F.Flow2(ba, ea.Set))
}
}


@@ -0,0 +1,638 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package lens
import (
"testing"
EQ "github.com/IBM/fp-go/v2/eq"
F "github.com/IBM/fp-go/v2/function"
"github.com/stretchr/testify/assert"
)
// TestModify tests the Modify function
func TestModify(t *testing.T) {
type Counter struct {
Value int
}
valueLens := MakeLens(
func(c Counter) int { return c.Value },
func(c Counter, v int) Counter {
c.Value = v
return c
},
)
counter := Counter{Value: 5}
// Test increment
increment := func(v int) int { return v + 1 }
modifyIncrement := Modify[Counter](increment)(valueLens)
incremented := modifyIncrement(counter)
assert.Equal(t, 6, incremented.Value)
assert.Equal(t, 5, counter.Value) // Original unchanged
// Test double
double := func(v int) int { return v * 2 }
modifyDouble := Modify[Counter](double)(valueLens)
doubled := modifyDouble(counter)
assert.Equal(t, 10, doubled.Value)
assert.Equal(t, 5, counter.Value) // Original unchanged
// Test identity (no change)
identity := func(v int) int { return v }
modifyIdentity := Modify[Counter](identity)(valueLens)
unchanged := modifyIdentity(counter)
assert.Equal(t, counter, unchanged)
}
func TestModifyRef(t *testing.T) {
valueLens := MakeLensRef(
func(s *Street) int { return s.num },
func(s *Street, num int) *Street {
s.num = num
return s
},
)
street := &Street{num: 10, name: "Main"}
// Test increment
increment := func(v int) int { return v + 1 }
modifyIncrement := Modify[*Street](increment)(valueLens)
incremented := modifyIncrement(street)
assert.Equal(t, 11, incremented.num)
assert.Equal(t, 10, street.num) // Original unchanged
}
// Lens Laws Tests
func TestMakeLensLaws(t *testing.T) {
nameLens := MakeLens(
func(s Street) string { return s.name },
func(s Street, name string) Street {
s.name = name
return s
},
)
street := Street{num: 1, name: "Main"}
newName := "Oak"
// Law 1: GetSet - lens.Set(lens.Get(s))(s) == s
t.Run("GetSet", func(t *testing.T) {
result := nameLens.Set(nameLens.Get(street))(street)
assert.Equal(t, street, result)
})
// Law 2: SetGet - lens.Get(lens.Set(a)(s)) == a
t.Run("SetGet", func(t *testing.T) {
result := nameLens.Get(nameLens.Set(newName)(street))
assert.Equal(t, newName, result)
})
// Law 3: SetSet - lens.Set(a2)(lens.Set(a1)(s)) == lens.Set(a2)(s)
t.Run("SetSet", func(t *testing.T) {
result1 := nameLens.Set("Elm")(nameLens.Set(newName)(street))
result2 := nameLens.Set("Elm")(street)
assert.Equal(t, result2, result1)
})
}
func TestMakeLensRefLaws(t *testing.T) {
nameLens := MakeLensRef(
(*Street).GetName,
(*Street).SetName,
)
street := &Street{num: 1, name: "Main"}
newName := "Oak"
// Law 1: GetSet - lens.Set(lens.Get(s))(s) == s
t.Run("GetSet", func(t *testing.T) {
result := nameLens.Set(nameLens.Get(street))(street)
assert.Equal(t, street.name, result.name)
assert.Equal(t, street.num, result.num)
})
// Law 2: SetGet - lens.Get(lens.Set(a)(s)) == a
t.Run("SetGet", func(t *testing.T) {
result := nameLens.Get(nameLens.Set(newName)(street))
assert.Equal(t, newName, result)
})
// Law 3: SetSet - lens.Set(a2)(lens.Set(a1)(s)) == lens.Set(a2)(s)
t.Run("SetSet", func(t *testing.T) {
result1 := nameLens.Set("Elm")(nameLens.Set(newName)(street))
result2 := nameLens.Set("Elm")(street)
assert.Equal(t, result2.name, result1.name)
assert.Equal(t, result2.num, result1.num)
})
}
func TestMakeLensCurriedLaws(t *testing.T) {
nameLens := MakeLensCurried(
func(s Street) string { return s.name },
func(name string) func(Street) Street {
return func(s Street) Street {
s.name = name
return s
}
},
)
street := Street{num: 1, name: "Main"}
newName := "Oak"
// Law 1: GetSet
t.Run("GetSet", func(t *testing.T) {
result := nameLens.Set(nameLens.Get(street))(street)
assert.Equal(t, street, result)
})
// Law 2: SetGet
t.Run("SetGet", func(t *testing.T) {
result := nameLens.Get(nameLens.Set(newName)(street))
assert.Equal(t, newName, result)
})
// Law 3: SetSet
t.Run("SetSet", func(t *testing.T) {
result1 := nameLens.Set("Elm")(nameLens.Set(newName)(street))
result2 := nameLens.Set("Elm")(street)
assert.Equal(t, result2, result1)
})
}
func TestMakeLensRefCurriedLaws(t *testing.T) {
nameLens := MakeLensRefCurried(
func(s *Street) string { return s.name },
func(name string) func(*Street) *Street {
return func(s *Street) *Street {
s.name = name
return s
}
},
)
street := &Street{num: 1, name: "Main"}
newName := "Oak"
// Law 1: GetSet
t.Run("GetSet", func(t *testing.T) {
result := nameLens.Set(nameLens.Get(street))(street)
assert.Equal(t, street.name, result.name)
assert.Equal(t, street.num, result.num)
})
// Law 2: SetGet
t.Run("SetGet", func(t *testing.T) {
result := nameLens.Get(nameLens.Set(newName)(street))
assert.Equal(t, newName, result)
})
// Law 3: SetSet
t.Run("SetSet", func(t *testing.T) {
result1 := nameLens.Set("Elm")(nameLens.Set(newName)(street))
result2 := nameLens.Set("Elm")(street)
assert.Equal(t, result2.name, result1.name)
assert.Equal(t, result2.num, result1.num)
})
}
func TestMakeLensWithEqLaws(t *testing.T) {
nameLens := MakeLensWithEq(
EQ.FromStrictEquals[string](),
func(s *Street) string { return s.name },
func(s *Street, name string) *Street {
s.name = name
return s
},
)
street := &Street{num: 1, name: "Main"}
newName := "Oak"
// Law 1: GetSet
t.Run("GetSet", func(t *testing.T) {
result := nameLens.Set(nameLens.Get(street))(street)
assert.Equal(t, street.name, result.name)
assert.Equal(t, street.num, result.num)
// With Eq optimization, should return same pointer
assert.Same(t, street, result)
})
// Law 2: SetGet
t.Run("SetGet", func(t *testing.T) {
result := nameLens.Get(nameLens.Set(newName)(street))
assert.Equal(t, newName, result)
})
// Law 3: SetSet
t.Run("SetSet", func(t *testing.T) {
result1 := nameLens.Set("Elm")(nameLens.Set(newName)(street))
result2 := nameLens.Set("Elm")(street)
assert.Equal(t, result2.name, result1.name)
assert.Equal(t, result2.num, result1.num)
})
}
func TestMakeLensStrictLaws(t *testing.T) {
nameLens := MakeLensStrict(
func(s *Street) string { return s.name },
func(s *Street, name string) *Street {
s.name = name
return s
},
)
street := &Street{num: 1, name: "Main"}
newName := "Oak"
// Law 1: GetSet
t.Run("GetSet", func(t *testing.T) {
result := nameLens.Set(nameLens.Get(street))(street)
assert.Equal(t, street.name, result.name)
assert.Equal(t, street.num, result.num)
// With strict equality optimization, should return same pointer
assert.Same(t, street, result)
})
// Law 2: SetGet
t.Run("SetGet", func(t *testing.T) {
result := nameLens.Get(nameLens.Set(newName)(street))
assert.Equal(t, newName, result)
})
// Law 3: SetSet
t.Run("SetSet", func(t *testing.T) {
result1 := nameLens.Set("Elm")(nameLens.Set(newName)(street))
result2 := nameLens.Set("Elm")(street)
assert.Equal(t, result2.name, result1.name)
assert.Equal(t, result2.num, result1.num)
})
}
func TestIdLaws(t *testing.T) {
idLens := Id[Street]()
street := Street{num: 1, name: "Main"}
newStreet := Street{num: 2, name: "Oak"}
// Law 1: GetSet
t.Run("GetSet", func(t *testing.T) {
result := idLens.Set(idLens.Get(street))(street)
assert.Equal(t, street, result)
})
// Law 2: SetGet
t.Run("SetGet", func(t *testing.T) {
result := idLens.Get(idLens.Set(newStreet)(street))
assert.Equal(t, newStreet, result)
})
// Law 3: SetSet
t.Run("SetSet", func(t *testing.T) {
anotherStreet := Street{num: 3, name: "Elm"}
result1 := idLens.Set(anotherStreet)(idLens.Set(newStreet)(street))
result2 := idLens.Set(anotherStreet)(street)
assert.Equal(t, result2, result1)
})
}
func TestIdRefLaws(t *testing.T) {
idLens := IdRef[Street]()
street := &Street{num: 1, name: "Main"}
newStreet := &Street{num: 2, name: "Oak"}
// Law 1: GetSet
t.Run("GetSet", func(t *testing.T) {
result := idLens.Set(idLens.Get(street))(street)
assert.Equal(t, street.name, result.name)
assert.Equal(t, street.num, result.num)
})
// Law 2: SetGet
t.Run("SetGet", func(t *testing.T) {
result := idLens.Get(idLens.Set(newStreet)(street))
assert.Equal(t, newStreet, result)
})
// Law 3: SetSet
t.Run("SetSet", func(t *testing.T) {
anotherStreet := &Street{num: 3, name: "Elm"}
result1 := idLens.Set(anotherStreet)(idLens.Set(newStreet)(street))
result2 := idLens.Set(anotherStreet)(street)
assert.Equal(t, result2, result1)
})
}
func TestComposeLaws(t *testing.T) {
streetLens := MakeLensRef((*Street).GetName, (*Street).SetName)
addrLens := MakeLensRef((*Address).GetStreet, (*Address).SetStreet)
// Compose to get street name from address
streetNameLens := Compose[*Address](streetLens)(addrLens)
sampleStreet := Street{num: 220, name: "Schönaicherstr"}
sampleAddress := Address{city: "Böblingen", street: &sampleStreet}
newName := "Böblingerstr"
// Law 1: GetSet
t.Run("GetSet", func(t *testing.T) {
result := streetNameLens.Set(streetNameLens.Get(&sampleAddress))(&sampleAddress)
assert.Equal(t, sampleAddress.street.name, result.street.name)
assert.Equal(t, sampleAddress.street.num, result.street.num)
})
// Law 2: SetGet
t.Run("SetGet", func(t *testing.T) {
result := streetNameLens.Get(streetNameLens.Set(newName)(&sampleAddress))
assert.Equal(t, newName, result)
})
// Law 3: SetSet
t.Run("SetSet", func(t *testing.T) {
result1 := streetNameLens.Set("Elm St")(streetNameLens.Set(newName)(&sampleAddress))
result2 := streetNameLens.Set("Elm St")(&sampleAddress)
assert.Equal(t, result2.street.name, result1.street.name)
})
}
func TestComposeRefLaws(t *testing.T) {
streetLens := MakeLensRef((*Street).GetName, (*Street).SetName)
addrLens := MakeLensRef((*Address).GetStreet, (*Address).SetStreet)
// Compose using ComposeRef
streetNameLens := ComposeRef[Address](streetLens)(addrLens)
sampleStreet := Street{num: 220, name: "Schönaicherstr"}
sampleAddress := Address{city: "Böblingen", street: &sampleStreet}
newName := "Böblingerstr"
// Law 1: GetSet
t.Run("GetSet", func(t *testing.T) {
result := streetNameLens.Set(streetNameLens.Get(&sampleAddress))(&sampleAddress)
assert.Equal(t, sampleAddress.street.name, result.street.name)
assert.Equal(t, sampleAddress.street.num, result.street.num)
})
// Law 2: SetGet
t.Run("SetGet", func(t *testing.T) {
result := streetNameLens.Get(streetNameLens.Set(newName)(&sampleAddress))
assert.Equal(t, newName, result)
})
// Law 3: SetSet
t.Run("SetSet", func(t *testing.T) {
result1 := streetNameLens.Set("Elm St")(streetNameLens.Set(newName)(&sampleAddress))
result2 := streetNameLens.Set("Elm St")(&sampleAddress)
assert.Equal(t, result2.street.name, result1.street.name)
})
}
func TestIMapLaws(t *testing.T) {
type Celsius float64
type Fahrenheit float64
celsiusToFahrenheit := func(c Celsius) Fahrenheit {
return Fahrenheit(c*9/5 + 32)
}
fahrenheitToCelsius := func(f Fahrenheit) Celsius {
return Celsius((f - 32) * 5 / 9)
}
type Weather struct {
Temperature Celsius
}
tempCelsiusLens := MakeLens(
func(w Weather) Celsius { return w.Temperature },
func(w Weather, t Celsius) Weather {
w.Temperature = t
return w
},
)
// Create a lens that works with Fahrenheit
tempFahrenheitLens := F.Pipe1(
tempCelsiusLens,
IMap[Weather](celsiusToFahrenheit, fahrenheitToCelsius),
)
weather := Weather{Temperature: 20} // 20°C
newTempF := Fahrenheit(86) // 86°F (30°C)
// Law 1: GetSet
t.Run("GetSet", func(t *testing.T) {
result := tempFahrenheitLens.Set(tempFahrenheitLens.Get(weather))(weather)
// Allow small floating point differences
assert.InDelta(t, float64(weather.Temperature), float64(result.Temperature), 0.0001)
})
// Law 2: SetGet
t.Run("SetGet", func(t *testing.T) {
result := tempFahrenheitLens.Get(tempFahrenheitLens.Set(newTempF)(weather))
assert.InDelta(t, float64(newTempF), float64(result), 0.0001)
})
// Law 3: SetSet
t.Run("SetSet", func(t *testing.T) {
anotherTempF := Fahrenheit(95) // 95°F (35°C)
result1 := tempFahrenheitLens.Set(anotherTempF)(tempFahrenheitLens.Set(newTempF)(weather))
result2 := tempFahrenheitLens.Set(anotherTempF)(weather)
assert.InDelta(t, float64(result2.Temperature), float64(result1.Temperature), 0.0001)
})
}
func TestIMapIdentity(t *testing.T) {
// IMap with identity functions should behave like the original lens
type S struct {
a int
}
originalLens := MakeLens(
func(s S) int { return s.a },
func(s S, a int) S {
s.a = a
return s
},
)
// Apply IMap with identity functions
identityMappedLens := F.Pipe1(
originalLens,
IMap[S](F.Identity[int], F.Identity[int]),
)
s := S{a: 42}
// Both lenses should behave identically
assert.Equal(t, originalLens.Get(s), identityMappedLens.Get(s))
assert.Equal(t, originalLens.Set(100)(s), identityMappedLens.Set(100)(s))
}
func TestIMapComposition(t *testing.T) {
// IMap(f, g) ∘ IMap(h, k) = IMap(f ∘ h, k ∘ g)
type S struct {
value int
}
baseLens := MakeLens(
func(s S) int { return s.value },
func(s S, v int) S {
s.value = v
return s
},
)
// First transformation: int -> float64
intToFloat := func(i int) float64 { return float64(i) }
floatToInt := func(f float64) int { return int(f) }
// Second transformation: float64 -> string
floatToString := func(f float64) string { return F.Pipe1(f, func(x float64) string { return "value" }) }
stringToFloat := func(s string) float64 { return 42.0 }
// Compose IMap twice
lens1 := F.Pipe1(baseLens, IMap[S](intToFloat, floatToInt))
lens2 := F.Pipe1(lens1, IMap[S](floatToString, stringToFloat))
// Direct composition
lens3 := F.Pipe1(
baseLens,
IMap[S](
F.Flow2(intToFloat, floatToString),
F.Flow2(stringToFloat, floatToInt),
),
)
s := S{value: 10}
// Both should produce the same results
assert.Equal(t, lens2.Get(s), lens3.Get(s))
assert.Equal(t, lens2.Set("test")(s), lens3.Set("test")(s))
}
func TestModifyLaws(t *testing.T) {
// Modify should satisfy: Modify(id) = id
// and: Modify(f ∘ g) = Modify(f) ∘ Modify(g)
type S struct {
value int
}
lens := MakeLens(
func(s S) int { return s.value },
func(s S, v int) S {
s.value = v
return s
},
)
s := S{value: 10}
// Modify with identity should return the same value
t.Run("ModifyIdentity", func(t *testing.T) {
modifyIdentity := Modify[S](F.Identity[int])(lens)
result := modifyIdentity(s)
assert.Equal(t, s, result)
})
// Modify composition: Modify(f ∘ g) = Modify(f) ∘ Modify(g)
t.Run("ModifyComposition", func(t *testing.T) {
f := func(x int) int { return x * 2 }
g := func(x int) int { return x + 3 }
// Modify(f ∘ g)
composed := F.Flow2(g, f)
modifyComposed := Modify[S](composed)(lens)
result1 := modifyComposed(s)
// Modify(f) ∘ Modify(g)
modifyG := Modify[S](g)(lens)
intermediate := modifyG(s)
modifyF := Modify[S](f)(lens)
result2 := modifyF(intermediate)
assert.Equal(t, result1, result2)
})
}
func TestComposeAssociativity(t *testing.T) {
// Test that lens composition is associative:
// (l1 ∘ l2) ∘ l3 = l1 ∘ (l2 ∘ l3)
type Level3 struct {
value string
}
type Level2 struct {
level3 Level3
}
type Level1 struct {
level2 Level2
}
lens12 := MakeLens(
func(l1 Level1) Level2 { return l1.level2 },
func(l1 Level1, l2 Level2) Level1 {
l1.level2 = l2
return l1
},
)
lens23 := MakeLens(
func(l2 Level2) Level3 { return l2.level3 },
func(l2 Level2, l3 Level3) Level2 {
l2.level3 = l3
return l2
},
)
lens3Value := MakeLens(
func(l3 Level3) string { return l3.value },
func(l3 Level3, v string) Level3 {
l3.value = v
return l3
},
)
// (lens12 ∘ lens23) ∘ lens3Value
composed1 := F.Pipe2(
lens12,
Compose[Level1](lens23),
Compose[Level1](lens3Value),
)
// lens12 ∘ (lens23 ∘ lens3Value)
composed2 := F.Pipe1(
lens12,
Compose[Level1](F.Pipe1(lens23, Compose[Level2](lens3Value))),
)
l1 := Level1{
level2: Level2{
level3: Level3{value: "test"},
},
}
// Both compositions should behave identically
assert.Equal(t, composed1.Get(l1), composed2.Get(l1))
assert.Equal(t, composed1.Set("new")(l1), composed2.Set("new")(l1))
}


@@ -1,7 +1,9 @@
package option
import (
"github.com/IBM/fp-go/v2/endomorphism"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/lazy"
"github.com/IBM/fp-go/v2/optics/lens"
O "github.com/IBM/fp-go/v2/option"
@@ -51,9 +53,9 @@ import (
// defaultSettings := &Settings{}
// configRetriesLens := F.Pipe1(settingsLens,
// lens.Compose[Config, *int](defaultSettings)(retriesLens))
func Compose[S, B, A any](defaultA A) func(ab LensO[A, B]) func(LensO[S, A]) LensO[S, B] {
func Compose[S, B, A any](defaultA A) func(LensO[A, B]) Operator[S, A, B] {
noneb := O.None[B]()
return func(ab LensO[A, B]) func(LensO[S, A]) LensO[S, B] {
return func(ab LensO[A, B]) Operator[S, A, B] {
abGet := ab.Get
abSetNone := ab.Set(noneb)
return func(sa LensO[S, A]) LensO[S, B] {
@@ -62,41 +64,24 @@ func Compose[S, B, A any](defaultA A) func(ab LensO[A, B]) func(LensO[S, A]) Len
setSomeA := F.Flow2(O.Some[A], sa.Set)
return lens.MakeLensCurried(
F.Flow2(saGet, O.Chain(abGet)),
func(optB Option[B]) Endomorphism[S] {
return func(s S) S {
optA := saGet(s)
return O.MonadFold(
optB,
// optB is None
func() S {
return O.MonadFold(
optA,
// optA is None - no-op
F.Constant(s),
// optA is Some - unset B in A
func(a A) S {
return setSomeA(abSetNone(a))(s)
},
)
},
// optB is Some
func(b B) S {
setB := ab.Set(O.Some(b))
return O.MonadFold(
optA,
// optA is None - create with defaultA
func() S {
return setSomeA(setB(defaultA))(s)
},
// optA is Some - update B in A
func(a A) S {
return setSomeA(setB(a))(s)
},
)
},
)
}
},
F.Flow2(
O.Fold(
// optB is None
lazy.Of(F.Flow2(
saGet,
O.Fold(endomorphism.Identity[S], F.Flow2(abSetNone, setSomeA)),
)),
// optB is Some
func(b B) func(S) Endomorphism[S] {
setB := ab.Set(O.Some(b))
return F.Flow2(
saGet,
O.Fold(lazy.Of(setSomeA(setB(defaultA))), F.Flow2(setB, setSomeA)),
)
},
),
endomorphism.Join[S],
),
)
}
}
@@ -150,8 +135,8 @@ func Compose[S, B, A any](defaultA A) func(ab LensO[A, B]) func(LensO[S, A]) Len
// port := configPortLens.Get(config) // None[int]
// updated := configPortLens.Set(O.Some(3306))(config)
// // updated.Database.Port == 3306, Host == "localhost" (from default)
func ComposeOption[S, B, A any](defaultA A) func(ab Lens[A, B]) func(LensO[S, A]) LensO[S, B] {
return func(ab Lens[A, B]) func(LensO[S, A]) LensO[S, B] {
func ComposeOption[S, B, A any](defaultA A) func(Lens[A, B]) Operator[S, A, B] {
return func(ab Lens[A, B]) Operator[S, A, B] {
abGet := ab.Get
abSet := ab.Set
return func(sa LensO[S, A]) LensO[S, B] {
@@ -159,33 +144,23 @@ func ComposeOption[S, B, A any](defaultA A) func(ab Lens[A, B]) func(LensO[S, A]
saSet := sa.Set
// Pre-compute setters
setNoneA := saSet(O.None[A]())
setSomeA := func(a A) Endomorphism[S] {
return saSet(O.Some(a))
}
return lens.MakeLens(
func(s S) Option[B] {
return O.Map(abGet)(saGet(s))
},
func(s S, optB Option[B]) S {
return O.Fold(
// optB is None - remove A entirely
F.Constant(setNoneA(s)),
// optB is Some - set B
func(b B) S {
optA := saGet(s)
return O.Fold(
// optA is None - create with defaultA
func() S {
return setSomeA(abSet(b)(defaultA))(s)
},
// optA is Some - update B in A
func(a A) S {
return setSomeA(abSet(b)(a))(s)
},
)(optA)
},
)(optB)
},
setSomeA := F.Flow2(O.Some[A], saSet)
return lens.MakeLensCurried(
F.Flow2(saGet, O.Map(abGet)),
O.Fold(
// optB is None - remove A entirely
lazy.Of(setNoneA),
// optB is Some - set B
func(b B) Endomorphism[S] {
absetB := abSet(b)
abSetA := absetB(defaultA)
return endomorphism.Join(F.Flow3(
saGet,
O.Fold(lazy.Of(abSetA), absetB),
setSomeA,
))
},
),
)
}
}


@@ -0,0 +1,841 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package option
import (
"testing"
EQT "github.com/IBM/fp-go/v2/eq/testing"
F "github.com/IBM/fp-go/v2/function"
L "github.com/IBM/fp-go/v2/optics/lens"
O "github.com/IBM/fp-go/v2/option"
"github.com/stretchr/testify/assert"
)
// Test types for ComposeOption - using unique names to avoid conflicts
type (
DatabaseCfg struct {
Host string
Port int
}
ServerConfig struct {
Database *DatabaseCfg
}
AppSettings struct {
MaxRetries int
Timeout int
}
ApplicationConfig struct {
Settings *AppSettings
}
)
// Helper methods for DatabaseCfg
func (db *DatabaseCfg) GetPort() int {
return db.Port
}
func (db *DatabaseCfg) SetPort(port int) *DatabaseCfg {
db.Port = port
return db
}
// Helper methods for ServerConfig
func (c ServerConfig) GetDatabase() *DatabaseCfg {
return c.Database
}
func (c ServerConfig) SetDatabase(db *DatabaseCfg) ServerConfig {
c.Database = db
return c
}
// Helper methods for AppSettings
func (s *AppSettings) GetMaxRetries() int {
return s.MaxRetries
}
func (s *AppSettings) SetMaxRetries(retries int) *AppSettings {
s.MaxRetries = retries
return s
}
// Helper methods for ApplicationConfig
func (ac ApplicationConfig) GetSettings() *AppSettings {
return ac.Settings
}
func (ac ApplicationConfig) SetSettings(s *AppSettings) ApplicationConfig {
ac.Settings = s
return ac
}
// TestComposeOptionBasicOperations tests basic get/set operations
func TestComposeOptionBasicOperations(t *testing.T) {
// Create lenses
dbLens := FromNillable(L.MakeLens(ServerConfig.GetDatabase, ServerConfig.SetDatabase))
portLens := L.MakeLensRef((*DatabaseCfg).GetPort, (*DatabaseCfg).SetPort)
defaultDB := &DatabaseCfg{Host: "localhost", Port: 5432}
configPortLens := F.Pipe1(dbLens, ComposeOption[ServerConfig, int](defaultDB)(portLens))
t.Run("Get from empty config returns None", func(t *testing.T) {
config := ServerConfig{Database: nil}
result := configPortLens.Get(config)
assert.True(t, O.IsNone(result))
})
t.Run("Get from config with database returns Some", func(t *testing.T) {
config := ServerConfig{Database: &DatabaseCfg{Host: "example.com", Port: 3306}}
result := configPortLens.Get(config)
assert.Equal(t, O.Some(3306), result)
})
t.Run("Set Some on empty config creates database with default", func(t *testing.T) {
config := ServerConfig{Database: nil}
updated := configPortLens.Set(O.Some(3306))(config)
assert.NotNil(t, updated.Database)
assert.Equal(t, 3306, updated.Database.Port)
assert.Equal(t, "localhost", updated.Database.Host) // From default
})
t.Run("Set Some on existing database updates port", func(t *testing.T) {
config := ServerConfig{Database: &DatabaseCfg{Host: "example.com", Port: 5432}}
updated := configPortLens.Set(O.Some(8080))(config)
assert.NotNil(t, updated.Database)
assert.Equal(t, 8080, updated.Database.Port)
assert.Equal(t, "example.com", updated.Database.Host) // Preserved
})
t.Run("Set None removes database entirely", func(t *testing.T) {
config := ServerConfig{Database: &DatabaseCfg{Host: "example.com", Port: 3306}}
updated := configPortLens.Set(O.None[int]())(config)
assert.Nil(t, updated.Database)
})
t.Run("Set None on empty config is no-op", func(t *testing.T) {
config := ServerConfig{Database: nil}
updated := configPortLens.Set(O.None[int]())(config)
assert.Nil(t, updated.Database)
})
}
// TestComposeOptionLensLawsDetailed verifies that ComposeOption satisfies lens laws
func TestComposeOptionLensLawsDetailed(t *testing.T) {
// Setup
defaultDB := &DatabaseCfg{Host: "localhost", Port: 5432}
dbLens := FromNillable(L.MakeLens(ServerConfig.GetDatabase, ServerConfig.SetDatabase))
portLens := L.MakeLensRef((*DatabaseCfg).GetPort, (*DatabaseCfg).SetPort)
configPortLens := F.Pipe1(dbLens, ComposeOption[ServerConfig, int](defaultDB)(portLens))
// Equality predicates
eqInt := EQT.Eq[int]()
eqOptInt := O.Eq(eqInt)
eqServerConfig := func(a, b ServerConfig) bool {
if a.Database == nil && b.Database == nil {
return true
}
if a.Database == nil || b.Database == nil {
return false
}
return a.Database.Host == b.Database.Host && a.Database.Port == b.Database.Port
}
// Test structures
configNil := ServerConfig{Database: nil}
config3306 := ServerConfig{Database: &DatabaseCfg{Host: "example.com", Port: 3306}}
config5432 := ServerConfig{Database: &DatabaseCfg{Host: "test.com", Port: 5432}}
// Law 1: GetSet - lens.Get(lens.Set(a)(s)) == a
t.Run("Law1_GetSet_WithSome", func(t *testing.T) {
// Setting Some(8080) and getting back should return Some(8080)
result := configPortLens.Get(configPortLens.Set(O.Some(8080))(config3306))
assert.True(t, eqOptInt.Equals(result, O.Some(8080)),
"Get(Set(Some(8080))(s)) should equal Some(8080)")
})
t.Run("Law1_GetSet_WithNone", func(t *testing.T) {
// Setting None and getting back should return None
result := configPortLens.Get(configPortLens.Set(O.None[int]())(config3306))
assert.True(t, eqOptInt.Equals(result, O.None[int]()),
"Get(Set(None)(s)) should equal None")
})
t.Run("Law1_GetSet_OnEmptyWithSome", func(t *testing.T) {
// Setting Some on empty config and getting back
result := configPortLens.Get(configPortLens.Set(O.Some(9000))(configNil))
assert.True(t, eqOptInt.Equals(result, O.Some(9000)),
"Get(Set(Some(9000))(empty)) should equal Some(9000)")
})
// Law 2: SetGet - lens.Set(lens.Get(s))(s) == s
t.Run("Law2_SetGet_WithDatabase", func(t *testing.T) {
// Setting what we get should return the same structure
result := configPortLens.Set(configPortLens.Get(config3306))(config3306)
assert.True(t, eqServerConfig(result, config3306),
"Set(Get(s))(s) should equal s")
})
t.Run("Law2_SetGet_WithoutDatabase", func(t *testing.T) {
// Setting what we get from empty should return the same structure
result := configPortLens.Set(configPortLens.Get(configNil))(configNil)
assert.True(t, eqServerConfig(result, configNil),
"Set(Get(empty))(empty) should equal empty")
})
t.Run("Law2_SetGet_DifferentConfigs", func(t *testing.T) {
// Test with another config
result := configPortLens.Set(configPortLens.Get(config5432))(config5432)
assert.True(t, eqServerConfig(result, config5432),
"Set(Get(s))(s) should equal s for any s")
})
// Law 3: SetSet - lens.Set(a2)(lens.Set(a1)(s)) == lens.Set(a2)(s)
t.Run("Law3_SetSet_BothSome", func(t *testing.T) {
// Setting twice with Some should be same as setting once
setTwice := configPortLens.Set(O.Some(9000))(configPortLens.Set(O.Some(8080))(config3306))
setOnce := configPortLens.Set(O.Some(9000))(config3306)
assert.True(t, eqServerConfig(setTwice, setOnce),
"Set(a2)(Set(a1)(s)) should equal Set(a2)(s)")
})
t.Run("Law3_SetSet_BothNone", func(t *testing.T) {
// Setting None twice should be same as setting once
setTwice := configPortLens.Set(O.None[int]())(configPortLens.Set(O.None[int]())(config3306))
setOnce := configPortLens.Set(O.None[int]())(config3306)
assert.True(t, eqServerConfig(setTwice, setOnce),
"Set(None)(Set(None)(s)) should equal Set(None)(s)")
})
t.Run("Law3_SetSet_SomeThenNone", func(t *testing.T) {
// Setting None after Some should be same as setting None directly
setTwice := configPortLens.Set(O.None[int]())(configPortLens.Set(O.Some(8080))(config3306))
setOnce := configPortLens.Set(O.None[int]())(config3306)
assert.True(t, eqServerConfig(setTwice, setOnce),
"Set(None)(Set(Some)(s)) should equal Set(None)(s)")
})
t.Run("Law3_SetSet_NoneThenSome", func(t *testing.T) {
// Setting Some after None creates a new database with default values
// This is different from setting Some directly which preserves existing fields
setTwice := configPortLens.Set(O.Some(8080))(configPortLens.Set(O.None[int]())(config3306))
// After setting None, the database is removed, so setting Some creates it with defaults
assert.NotNil(t, setTwice.Database)
assert.Equal(t, 8080, setTwice.Database.Port)
assert.Equal(t, "localhost", setTwice.Database.Host) // From default, not "example.com"
// This demonstrates that ComposeOption's behavior when setting None then Some
// uses the default value for the intermediate structure
setOnce := configPortLens.Set(O.Some(8080))(config3306)
assert.Equal(t, 8080, setOnce.Database.Port)
assert.Equal(t, "example.com", setOnce.Database.Host) // Preserved from original
// They are NOT equal because the Host field differs
assert.False(t, eqServerConfig(setTwice, setOnce),
"Set(Some)(Set(None)(s)) uses default, Set(Some)(s) preserves fields")
})
t.Run("Law3_SetSet_OnEmpty", func(t *testing.T) {
// Setting twice on empty config
setTwice := configPortLens.Set(O.Some(9000))(configPortLens.Set(O.Some(8080))(configNil))
setOnce := configPortLens.Set(O.Some(9000))(configNil)
assert.True(t, eqServerConfig(setTwice, setOnce),
"Set(a2)(Set(a1)(empty)) should equal Set(a2)(empty)")
})
}
// TestComposeOptionWithModify tests the Modify operation
func TestComposeOptionWithModify(t *testing.T) {
defaultDB := &DatabaseCfg{Host: "localhost", Port: 5432}
dbLens := FromNillable(L.MakeLens(ServerConfig.GetDatabase, ServerConfig.SetDatabase))
portLens := L.MakeLensRef((*DatabaseCfg).GetPort, (*DatabaseCfg).SetPort)
configPortLens := F.Pipe1(dbLens, ComposeOption[ServerConfig, int](defaultDB)(portLens))
t.Run("Modify with identity returns same structure", func(t *testing.T) {
config := ServerConfig{Database: &DatabaseCfg{Host: "example.com", Port: 3306}}
result := L.Modify[ServerConfig](F.Identity[Option[int]])(configPortLens)(config)
assert.Equal(t, config.Database.Port, result.Database.Port)
assert.Equal(t, config.Database.Host, result.Database.Host)
})
t.Run("Modify with Some transformation", func(t *testing.T) {
config := ServerConfig{Database: &DatabaseCfg{Host: "example.com", Port: 3306}}
// Double the port if it exists
doublePort := O.Map(func(p int) int { return p * 2 })
result := L.Modify[ServerConfig](doublePort)(configPortLens)(config)
assert.Equal(t, 6612, result.Database.Port)
assert.Equal(t, "example.com", result.Database.Host)
})
t.Run("Modify on empty config with Some transformation", func(t *testing.T) {
config := ServerConfig{Database: nil}
doublePort := O.Map(func(p int) int { return p * 2 })
result := L.Modify[ServerConfig](doublePort)(configPortLens)(config)
// Should remain empty since there's nothing to modify
assert.Nil(t, result.Database)
})
}
// TestComposeOptionComposition tests composing multiple ComposeOption lenses
func TestComposeOptionComposition(t *testing.T) {
type Level3 struct {
Value int
}
type Level2 struct {
Level3 *Level3
}
type Level1 struct {
Level2 *Level2
}
// Create lenses
level2Lens := FromNillable(L.MakeLens(
func(l1 Level1) *Level2 { return l1.Level2 },
func(l1 Level1, l2 *Level2) Level1 { l1.Level2 = l2; return l1 },
))
level3Lens := L.MakeLensRef(
func(l2 *Level2) *Level3 { return l2.Level3 },
func(l2 *Level2, l3 *Level3) *Level2 { l2.Level3 = l3; return l2 },
)
valueLens := L.MakeLensRef(
func(l3 *Level3) int { return l3.Value },
func(l3 *Level3, v int) *Level3 { l3.Value = v; return l3 },
)
// Compose: Level1 -> Option[Level2] -> Option[Level3] -> Option[int]
defaultLevel2 := &Level2{Level3: &Level3{Value: 0}}
defaultLevel3 := &Level3{Value: 0}
// First composition: Level1 -> Option[Level3]
level1ToLevel3 := F.Pipe1(level2Lens, ComposeOption[Level1, *Level3](defaultLevel2)(level3Lens))
// Second composition: Level1 -> Option[int]
level1ToValue := F.Pipe1(level1ToLevel3, ComposeOption[Level1, int](defaultLevel3)(valueLens))
t.Run("Get from fully populated structure", func(t *testing.T) {
l1 := Level1{Level2: &Level2{Level3: &Level3{Value: 42}}}
result := level1ToValue.Get(l1)
assert.Equal(t, O.Some(42), result)
})
t.Run("Get from empty structure", func(t *testing.T) {
l1 := Level1{Level2: nil}
result := level1ToValue.Get(l1)
assert.True(t, O.IsNone(result))
})
t.Run("Set on empty structure creates all levels", func(t *testing.T) {
l1 := Level1{Level2: nil}
updated := level1ToValue.Set(O.Some(100))(l1)
assert.NotNil(t, updated.Level2)
assert.NotNil(t, updated.Level2.Level3)
assert.Equal(t, 100, updated.Level2.Level3.Value)
})
t.Run("Set None removes top level", func(t *testing.T) {
l1 := Level1{Level2: &Level2{Level3: &Level3{Value: 42}}}
updated := level1ToValue.Set(O.None[int]())(l1)
assert.Nil(t, updated.Level2)
})
}
// TestComposeOptionEdgeCasesExtended tests additional edge cases
func TestComposeOptionEdgeCasesExtended(t *testing.T) {
defaultSettings := &AppSettings{MaxRetries: 3, Timeout: 30}
settingsLens := FromNillable(L.MakeLens(ApplicationConfig.GetSettings, ApplicationConfig.SetSettings))
retriesLens := L.MakeLensRef((*AppSettings).GetMaxRetries, (*AppSettings).SetMaxRetries)
configRetriesLens := F.Pipe1(settingsLens, ComposeOption[ApplicationConfig, int](defaultSettings)(retriesLens))
t.Run("Multiple sets with different values", func(t *testing.T) {
config := ApplicationConfig{Settings: nil}
// Set multiple times
config = configRetriesLens.Set(O.Some(5))(config)
assert.Equal(t, 5, config.Settings.MaxRetries)
config = configRetriesLens.Set(O.Some(10))(config)
assert.Equal(t, 10, config.Settings.MaxRetries)
config = configRetriesLens.Set(O.None[int]())(config)
assert.Nil(t, config.Settings)
})
t.Run("Get after Set maintains consistency", func(t *testing.T) {
config := ApplicationConfig{Settings: nil}
updated := configRetriesLens.Set(O.Some(7))(config)
retrieved := configRetriesLens.Get(updated)
assert.Equal(t, O.Some(7), retrieved)
})
t.Run("Default values are used correctly", func(t *testing.T) {
config := ApplicationConfig{Settings: nil}
updated := configRetriesLens.Set(O.Some(15))(config)
// Check that default timeout is used
assert.Equal(t, 30, updated.Settings.Timeout)
assert.Equal(t, 15, updated.Settings.MaxRetries)
})
t.Run("Preserves other fields when updating", func(t *testing.T) {
config := ApplicationConfig{Settings: &AppSettings{MaxRetries: 5, Timeout: 60}}
updated := configRetriesLens.Set(O.Some(10))(config)
assert.Equal(t, 10, updated.Settings.MaxRetries)
assert.Equal(t, 60, updated.Settings.Timeout) // Preserved
})
}
// TestComposeOptionWithZeroValues tests behavior with zero values
func TestComposeOptionWithZeroValues(t *testing.T) {
defaultDB := &DatabaseCfg{Host: "", Port: 0}
dbLens := FromNillable(L.MakeLens(ServerConfig.GetDatabase, ServerConfig.SetDatabase))
portLens := L.MakeLensRef((*DatabaseCfg).GetPort, (*DatabaseCfg).SetPort)
configPortLens := F.Pipe1(dbLens, ComposeOption[ServerConfig, int](defaultDB)(portLens))
t.Run("Set zero value", func(t *testing.T) {
config := ServerConfig{Database: &DatabaseCfg{Host: "example.com", Port: 3306}}
updated := configPortLens.Set(O.Some(0))(config)
assert.Equal(t, 0, updated.Database.Port)
assert.Equal(t, "example.com", updated.Database.Host)
})
t.Run("Get zero value returns Some(0)", func(t *testing.T) {
config := ServerConfig{Database: &DatabaseCfg{Host: "example.com", Port: 0}}
result := configPortLens.Get(config)
assert.Equal(t, O.Some(0), result)
})
t.Run("Default with zero values", func(t *testing.T) {
config := ServerConfig{Database: nil}
updated := configPortLens.Set(O.Some(8080))(config)
assert.Equal(t, "", updated.Database.Host) // From default
assert.Equal(t, 8080, updated.Database.Port)
})
}
// ============================================================================
// Tests for Compose function (both lenses return Option values)
// ============================================================================
// TestComposeBasicOperations tests basic get/set operations for Compose
func TestComposeBasicOperations(t *testing.T) {
type Value struct {
Data *string
}
type Container struct {
Value *Value
}
// Create lenses
valueLens := FromNillable(L.MakeLens(
func(c Container) *Value { return c.Value },
func(c Container, v *Value) Container { c.Value = v; return c },
))
dataLens := L.MakeLensRef(
func(v *Value) *string { return v.Data },
func(v *Value, d *string) *Value { v.Data = d; return v },
)
defaultValue := &Value{Data: nil}
composedLens := F.Pipe1(valueLens, Compose[Container, *string](defaultValue)(
FromNillable(dataLens),
))
t.Run("Get from empty container returns None", func(t *testing.T) {
container := Container{Value: nil}
result := composedLens.Get(container)
assert.True(t, O.IsNone(result))
})
t.Run("Get from container with nil data returns None", func(t *testing.T) {
container := Container{Value: &Value{Data: nil}}
result := composedLens.Get(container)
assert.True(t, O.IsNone(result))
})
t.Run("Get from container with data returns Some", func(t *testing.T) {
data := "test"
container := Container{Value: &Value{Data: &data}}
result := composedLens.Get(container)
assert.True(t, O.IsSome(result))
assert.Equal(t, &data, O.GetOrElse(func() *string { return nil })(result))
})
t.Run("Set Some on empty container creates structure with default", func(t *testing.T) {
container := Container{Value: nil}
data := "new"
updated := composedLens.Set(O.Some(&data))(container)
assert.NotNil(t, updated.Value)
assert.NotNil(t, updated.Value.Data)
assert.Equal(t, "new", *updated.Value.Data)
})
t.Run("Set Some on existing container updates data", func(t *testing.T) {
oldData := "old"
container := Container{Value: &Value{Data: &oldData}}
newData := "new"
updated := composedLens.Set(O.Some(&newData))(container)
assert.NotNil(t, updated.Value)
assert.NotNil(t, updated.Value.Data)
assert.Equal(t, "new", *updated.Value.Data)
})
t.Run("Set None when container is empty is no-op", func(t *testing.T) {
container := Container{Value: nil}
updated := composedLens.Set(O.None[*string]())(container)
assert.Nil(t, updated.Value)
})
t.Run("Set None when container exists unsets data", func(t *testing.T) {
data := "test"
container := Container{Value: &Value{Data: &data}}
updated := composedLens.Set(O.None[*string]())(container)
assert.NotNil(t, updated.Value)
assert.Nil(t, updated.Value.Data)
})
}
// TestComposeLensLawsDetailed verifies that Compose satisfies lens laws
func TestComposeLensLawsDetailed(t *testing.T) {
type Inner struct {
Value *int
Extra string
}
type Outer struct {
Inner *Inner
}
// Setup
defaultInner := &Inner{Value: nil, Extra: "default"}
innerLens := FromNillable(L.MakeLens(
func(o Outer) *Inner { return o.Inner },
func(o Outer, i *Inner) Outer { o.Inner = i; return o },
))
valueLens := L.MakeLensRef(
func(i *Inner) *int { return i.Value },
func(i *Inner, v *int) *Inner { i.Value = v; return i },
)
composedLens := F.Pipe1(innerLens, Compose[Outer, *int](defaultInner)(
FromNillable(valueLens),
))
// Equality predicates
eqIntPtr := EQT.Eq[*int]()
eqOptIntPtr := O.Eq(eqIntPtr)
eqOuter := func(a, b Outer) bool {
if a.Inner == nil && b.Inner == nil {
return true
}
if a.Inner == nil || b.Inner == nil {
return false
}
aVal := a.Inner.Value
bVal := b.Inner.Value
if aVal == nil && bVal == nil {
return a.Inner.Extra == b.Inner.Extra
}
if aVal == nil || bVal == nil {
return false
}
return *aVal == *bVal && a.Inner.Extra == b.Inner.Extra
}
// Test structures
val42 := 42
val100 := 100
outerNil := Outer{Inner: nil}
outerWithNilValue := Outer{Inner: &Inner{Value: nil, Extra: "test"}}
outer42 := Outer{Inner: &Inner{Value: &val42, Extra: "test"}}
// Law 1: GetSet - lens.Get(lens.Set(a)(s)) == a
t.Run("Law1_GetSet_WithSome", func(t *testing.T) {
result := composedLens.Get(composedLens.Set(O.Some(&val100))(outer42))
assert.True(t, eqOptIntPtr.Equals(result, O.Some(&val100)),
"Get(Set(Some(100))(s)) should equal Some(100)")
})
t.Run("Law1_GetSet_WithNone", func(t *testing.T) {
result := composedLens.Get(composedLens.Set(O.None[*int]())(outer42))
assert.True(t, eqOptIntPtr.Equals(result, O.None[*int]()),
"Get(Set(None)(s)) should equal None")
})
t.Run("Law1_GetSet_OnEmpty", func(t *testing.T) {
result := composedLens.Get(composedLens.Set(O.Some(&val100))(outerNil))
assert.True(t, eqOptIntPtr.Equals(result, O.Some(&val100)),
"Get(Set(Some(100))(empty)) should equal Some(100)")
})
// Law 2: SetGet - lens.Set(lens.Get(s))(s) == s
t.Run("Law2_SetGet_WithValue", func(t *testing.T) {
result := composedLens.Set(composedLens.Get(outer42))(outer42)
assert.True(t, eqOuter(result, outer42),
"Set(Get(s))(s) should equal s")
})
t.Run("Law2_SetGet_WithNilValue", func(t *testing.T) {
result := composedLens.Set(composedLens.Get(outerWithNilValue))(outerWithNilValue)
assert.True(t, eqOuter(result, outerWithNilValue),
"Set(Get(s))(s) should equal s when value is nil")
})
t.Run("Law2_SetGet_WithNilInner", func(t *testing.T) {
result := composedLens.Set(composedLens.Get(outerNil))(outerNil)
assert.True(t, eqOuter(result, outerNil),
"Set(Get(empty))(empty) should equal empty")
})
// Law 3: SetSet - lens.Set(a2)(lens.Set(a1)(s)) == lens.Set(a2)(s)
t.Run("Law3_SetSet_BothSome", func(t *testing.T) {
val200 := 200
setTwice := composedLens.Set(O.Some(&val200))(composedLens.Set(O.Some(&val100))(outer42))
setOnce := composedLens.Set(O.Some(&val200))(outer42)
assert.True(t, eqOuter(setTwice, setOnce),
"Set(a2)(Set(a1)(s)) should equal Set(a2)(s)")
})
t.Run("Law3_SetSet_BothNone", func(t *testing.T) {
setTwice := composedLens.Set(O.None[*int]())(composedLens.Set(O.None[*int]())(outer42))
setOnce := composedLens.Set(O.None[*int]())(outer42)
assert.True(t, eqOuter(setTwice, setOnce),
"Set(None)(Set(None)(s)) should equal Set(None)(s)")
})
t.Run("Law3_SetSet_SomeThenNone", func(t *testing.T) {
setTwice := composedLens.Set(O.None[*int]())(composedLens.Set(O.Some(&val100))(outer42))
setOnce := composedLens.Set(O.None[*int]())(outer42)
assert.True(t, eqOuter(setTwice, setOnce),
"Set(None)(Set(Some)(s)) should equal Set(None)(s)")
})
t.Run("Law3_SetSet_NoneThenSome", func(t *testing.T) {
// This case is interesting: setting None then Some uses default
setTwice := composedLens.Set(O.Some(&val100))(composedLens.Set(O.None[*int]())(outer42))
// After None, inner still exists but value is nil
// Then setting Some updates the value
assert.NotNil(t, setTwice.Inner)
assert.NotNil(t, setTwice.Inner.Value)
assert.Equal(t, 100, *setTwice.Inner.Value)
assert.Equal(t, "test", setTwice.Inner.Extra) // Preserved from original
})
}
// TestComposeWithModify tests the Modify operation for Compose
func TestComposeWithModify(t *testing.T) {
type Data struct {
Count *int
}
type Store struct {
Data *Data
}
defaultData := &Data{Count: nil}
dataLens := FromNillable(L.MakeLens(
func(s Store) *Data { return s.Data },
func(s Store, d *Data) Store { s.Data = d; return s },
))
countLens := L.MakeLensRef(
func(d *Data) *int { return d.Count },
func(d *Data, c *int) *Data { d.Count = c; return d },
)
composedLens := F.Pipe1(dataLens, Compose[Store, *int](defaultData)(
FromNillable(countLens),
))
t.Run("Modify with identity returns same structure", func(t *testing.T) {
count := 5
store := Store{Data: &Data{Count: &count}}
result := L.Modify[Store](F.Identity[Option[*int]])(composedLens)(store)
assert.Equal(t, 5, *result.Data.Count)
})
t.Run("Modify with Some transformation", func(t *testing.T) {
count := 5
store := Store{Data: &Data{Count: &count}}
// Double the count if it exists
doubleCount := O.Map(func(c *int) *int {
doubled := *c * 2
return &doubled
})
result := L.Modify[Store](doubleCount)(composedLens)(store)
assert.Equal(t, 10, *result.Data.Count)
})
t.Run("Modify on empty store", func(t *testing.T) {
store := Store{Data: nil}
doubleCount := O.Map(func(c *int) *int {
doubled := *c * 2
return &doubled
})
result := L.Modify[Store](doubleCount)(composedLens)(store)
// Should remain empty since there's nothing to modify
assert.Nil(t, result.Data)
})
}
// TestComposeMultiLevel tests composing multiple Compose operations
func TestComposeMultiLevel(t *testing.T) {
type Level3 struct {
Value *string
}
type Level2 struct {
Level3 *Level3
}
type Level1 struct {
Level2 *Level2
}
// Create lenses
level2Lens := FromNillable(L.MakeLens(
func(l1 Level1) *Level2 { return l1.Level2 },
func(l1 Level1, l2 *Level2) Level1 { l1.Level2 = l2; return l1 },
))
level3Lens := L.MakeLensRef(
func(l2 *Level2) *Level3 { return l2.Level3 },
func(l2 *Level2, l3 *Level3) *Level2 { l2.Level3 = l3; return l2 },
)
valueLens := L.MakeLensRef(
func(l3 *Level3) *string { return l3.Value },
func(l3 *Level3, v *string) *Level3 { l3.Value = v; return l3 },
)
// Compose: Level1 -> Option[Level2] -> Option[Level3] -> Option[string]
defaultLevel2 := &Level2{Level3: nil}
defaultLevel3 := &Level3{Value: nil}
// First composition: Level1 -> Option[Level3]
level1ToLevel3 := F.Pipe1(level2Lens, Compose[Level1, *Level3](defaultLevel2)(
FromNillable(level3Lens),
))
// Second composition: Level1 -> Option[string]
level1ToValue := F.Pipe1(level1ToLevel3, Compose[Level1, *string](defaultLevel3)(
FromNillable(valueLens),
))
t.Run("Get from fully populated structure", func(t *testing.T) {
value := "test"
l1 := Level1{Level2: &Level2{Level3: &Level3{Value: &value}}}
result := level1ToValue.Get(l1)
assert.True(t, O.IsSome(result))
})
t.Run("Get from partially populated structure", func(t *testing.T) {
l1 := Level1{Level2: &Level2{Level3: &Level3{Value: nil}}}
result := level1ToValue.Get(l1)
assert.True(t, O.IsNone(result))
})
t.Run("Get from empty structure", func(t *testing.T) {
l1 := Level1{Level2: nil}
result := level1ToValue.Get(l1)
assert.True(t, O.IsNone(result))
})
t.Run("Set on empty structure creates all levels", func(t *testing.T) {
l1 := Level1{Level2: nil}
value := "new"
updated := level1ToValue.Set(O.Some(&value))(l1)
assert.NotNil(t, updated.Level2)
assert.NotNil(t, updated.Level2.Level3)
assert.NotNil(t, updated.Level2.Level3.Value)
assert.Equal(t, "new", *updated.Level2.Level3.Value)
})
t.Run("Set None when structure exists unsets value", func(t *testing.T) {
value := "test"
l1 := Level1{Level2: &Level2{Level3: &Level3{Value: &value}}}
updated := level1ToValue.Set(O.None[*string]())(l1)
assert.NotNil(t, updated.Level2)
assert.NotNil(t, updated.Level2.Level3)
assert.Nil(t, updated.Level2.Level3.Value)
})
}
// TestComposeEdgeCasesExtended tests additional edge cases for Compose
func TestComposeEdgeCasesExtended(t *testing.T) {
type Metadata struct {
Tags *[]string
}
type Document struct {
Metadata *Metadata
}
defaultMetadata := &Metadata{Tags: nil}
metadataLens := FromNillable(L.MakeLens(
func(d Document) *Metadata { return d.Metadata },
func(d Document, m *Metadata) Document { d.Metadata = m; return d },
))
tagsLens := L.MakeLensRef(
func(m *Metadata) *[]string { return m.Tags },
func(m *Metadata, t *[]string) *Metadata { m.Tags = t; return m },
)
composedLens := F.Pipe1(metadataLens, Compose[Document, *[]string](defaultMetadata)(
FromNillable(tagsLens),
))
t.Run("Multiple sets with different values", func(t *testing.T) {
doc := Document{Metadata: nil}
tags1 := []string{"tag1"}
tags2 := []string{"tag2", "tag3"}
// Set first value
doc = composedLens.Set(O.Some(&tags1))(doc)
assert.NotNil(t, doc.Metadata)
assert.NotNil(t, doc.Metadata.Tags)
assert.Equal(t, 1, len(*doc.Metadata.Tags))
// Set second value
doc = composedLens.Set(O.Some(&tags2))(doc)
assert.Equal(t, 2, len(*doc.Metadata.Tags))
// Set None
doc = composedLens.Set(O.None[*[]string]())(doc)
assert.NotNil(t, doc.Metadata)
assert.Nil(t, doc.Metadata.Tags)
})
t.Run("Get after Set maintains consistency", func(t *testing.T) {
doc := Document{Metadata: nil}
tags := []string{"test"}
updated := composedLens.Set(O.Some(&tags))(doc)
retrieved := composedLens.Get(updated)
assert.True(t, O.IsSome(retrieved))
})
t.Run("Default values are used when creating structure", func(t *testing.T) {
doc := Document{Metadata: nil}
tags := []string{"new"}
updated := composedLens.Set(O.Some(&tags))(doc)
// Metadata should be created with default (Tags: nil initially, then set)
assert.NotNil(t, updated.Metadata)
assert.NotNil(t, updated.Metadata.Tags)
assert.Equal(t, []string{"new"}, *updated.Metadata.Tags)
})
}


@@ -1,92 +1,95 @@
package option
import (
EM "github.com/IBM/fp-go/v2/endomorphism"
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/optics/lens"
LI "github.com/IBM/fp-go/v2/optics/lens/iso"
O "github.com/IBM/fp-go/v2/option"
)
// fromPredicate returns a `Lens` for a property accessible as a getter and setter that can be optional.
// If the optional value is None, the nil value will be set instead.
func fromPredicate[GET ~func(S) Option[A], SET ~func(S, Option[A]) S, S, A any](creator func(get GET, set SET) LensO[S, A], pred func(A) bool, nilValue A) func(sa Lens[S, A]) LensO[S, A] {
func fromPredicate[GET ~func(S) Option[A], SET ~func(Option[A]) Endomorphism[S], S, A any](creator func(get GET, set SET) LensO[S, A], pred func(A) bool, nilValue A) func(sa Lens[S, A]) LensO[S, A] {
fromPred := O.FromPredicate(pred)
return func(sa Lens[S, A]) LensO[S, A] {
fold := O.Fold(F.Bind1of1(sa.Set)(nilValue), sa.Set)
return creator(F.Flow2(sa.Get, fromPred), func(s S, a Option[A]) S {
return F.Pipe2(
a,
fold,
EM.Ap(s),
)
})
return creator(F.Flow2(sa.Get, fromPred), O.Fold(F.Bind1of1(sa.Set)(nilValue), sa.Set))
}
}
// FromPredicate returns a `Lens` for a property accessible as a getter and setter that can be optional.
// If the optional value is None, the nil value will be set instead.
//
//go:inline
func FromPredicate[S, A any](pred func(A) bool, nilValue A) func(sa Lens[S, A]) LensO[S, A] {
return fromPredicate(lens.MakeLens[func(S) Option[A], func(S, Option[A]) S], pred, nilValue)
return fromPredicate(lens.MakeLensCurried[func(S) Option[A], func(Option[A]) Endomorphism[S]], pred, nilValue)
}
// FromPredicateRef returns a `Lens` for a property accessible as a getter and setter that can be optional.
// If the optional value is None, the nil value will be set instead.
func FromPredicateRef[S, A any](pred func(A) bool, nilValue A) func(sa Lens[*S, A]) Lens[*S, Option[A]] {
return fromPredicate(lens.MakeLensRef[func(*S) Option[A], func(*S, Option[A]) *S], pred, nilValue)
//
//go:inline
func FromPredicateRef[S, A any](pred func(A) bool, nilValue A) func(sa Lens[*S, A]) LensO[*S, A] {
return fromPredicate(lens.MakeLensRefCurried[S, Option[A]], pred, nilValue)
}
// FromNillable returns a `Lens` for a property accessible as a getter and setter that can be optional.
// If the optional value is None, the `nil` value will be set instead.
func FromNillable[S, A any](sa Lens[S, *A]) Lens[S, Option[*A]] {
//
//go:inline
func FromNillable[S, A any](sa Lens[S, *A]) LensO[S, *A] {
return FromPredicate[S](F.IsNonNil[A], nil)(sa)
}
// FromNillableRef returns a `Lens` for a property accessible as a getter and setter that can be optional.
// If the optional value is None, the `nil` value will be set instead.
func FromNillableRef[S, A any](sa Lens[*S, *A]) Lens[*S, Option[*A]] {
//
//go:inline
func FromNillableRef[S, A any](sa Lens[*S, *A]) LensO[*S, *A] {
return FromPredicateRef[S](F.IsNonNil[A], nil)(sa)
}
// fromNullableProp returns a `Lens` from a property that may be optional. The getter returns a default value for absent items.
func fromNullableProp[GET ~func(S) A, SET ~func(S, A) S, S, A any](creator func(get GET, set SET) Lens[S, A], isNullable func(A) Option[A], defaultValue A) func(sa Lens[S, A]) Lens[S, A] {
func fromNullableProp[GET ~func(S) A, SET ~func(A) Endomorphism[S], S, A any](creator func(get GET, set SET) Lens[S, A], isNullable O.Kleisli[A, A], defaultValue A) func(sa Lens[S, A]) Lens[S, A] {
orElse := O.GetOrElse(F.Constant(defaultValue))
return func(sa Lens[S, A]) Lens[S, A] {
return creator(F.Flow3(
sa.Get,
isNullable,
O.GetOrElse(F.Constant(defaultValue)),
), func(s S, a A) S {
return sa.Set(a)(s)
},
)
orElse,
), sa.Set)
}
}
// FromNullableProp returns a `Lens` from a property that may be optional. The getter returns a default value for absent items.
func FromNullableProp[S, A any](isNullable func(A) Option[A], defaultValue A) func(sa Lens[S, A]) Lens[S, A] {
return fromNullableProp(lens.MakeLens[func(S) A, func(S, A) S], isNullable, defaultValue)
//
//go:inline
func FromNullableProp[S, A any](isNullable O.Kleisli[A, A], defaultValue A) lens.Operator[S, A, A] {
return fromNullableProp(lens.MakeLensCurried[func(S) A, func(A) Endomorphism[S]], isNullable, defaultValue)
}
// FromNullablePropRef returns a `Lens` from a property that may be optional. The getter returns a default value for absent items.
func FromNullablePropRef[S, A any](isNullable func(A) Option[A], defaultValue A) func(sa Lens[*S, A]) Lens[*S, A] {
return fromNullableProp(lens.MakeLensRef[func(*S) A, func(*S, A) *S], isNullable, defaultValue)
//
//go:inline
func FromNullablePropRef[S, A any](isNullable O.Kleisli[A, A], defaultValue A) lens.Operator[*S, A, A] {
return fromNullableProp(lens.MakeLensRefCurried[S, A], isNullable, defaultValue)
}
// fromOption returns a `Lens` from an option property. The getter returns a default value; the setter always sets the Some option.
func fromOption[GET ~func(S) A, SET ~func(S, A) S, S, A any](creator func(get GET, set SET) Lens[S, A], defaultValue A) func(sa LensO[S, A]) Lens[S, A] {
func fromOption[GET ~func(S) A, SET ~func(A) Endomorphism[S], S, A any](creator func(get GET, set SET) Lens[S, A], defaultValue A) func(LensO[S, A]) Lens[S, A] {
orElse := O.GetOrElse(F.Constant(defaultValue))
return func(sa LensO[S, A]) Lens[S, A] {
return creator(F.Flow2(
sa.Get,
O.GetOrElse(F.Constant(defaultValue)),
), func(s S, a A) S {
return sa.Set(O.Some(a))(s)
},
)
orElse,
), F.Flow2(O.Of[A], sa.Set))
}
}
// FromOption returns a `Lens` from an option property. The getter returns a default value; the setter always sets the Some option.
func FromOption[S, A any](defaultValue A) func(sa LensO[S, A]) Lens[S, A] {
return fromOption(lens.MakeLens[func(S) A, func(S, A) S], defaultValue)
//
//go:inline
func FromOption[S, A any](defaultValue A) func(LensO[S, A]) Lens[S, A] {
return fromOption(lens.MakeLensCurried[func(S) A, func(A) Endomorphism[S]], defaultValue)
}
// FromOptionRef creates a lens from an Option property with a default value for pointer structures.
@@ -104,6 +107,76 @@ func FromOption[S, A any](defaultValue A) func(sa LensO[S, A]) Lens[S, A] {
//
// Returns:
// - A function that takes a Lens[*S, Option[A]] and returns a Lens[*S, A]
func FromOptionRef[S, A any](defaultValue A) func(sa Lens[*S, Option[A]]) Lens[*S, A] {
return fromOption(lens.MakeLensRef[func(*S) A, func(*S, A) *S], defaultValue)
//
//go:inline
func FromOptionRef[S, A any](defaultValue A) func(LensO[*S, A]) Lens[*S, A] {
return fromOption(lens.MakeLensRefCurried[S, A], defaultValue)
}
// FromIso converts a Lens[S, A] to a LensO[S, A] using an isomorphism.
//
// This function takes an isomorphism between A and Option[A] and uses it to
// transform a regular lens into an optional lens. It's particularly useful when
// you have a custom isomorphism that defines how to convert between a value
// and its optional representation.
//
// The isomorphism must satisfy the round-trip laws:
// 1. iso.ReverseGet(iso.Get(a)) == a for all a: A
// 2. iso.Get(iso.ReverseGet(opt)) == opt for all opt: Option[A]
//
// Type Parameters:
// - S: The structure type containing the field
// - A: The type of the field being focused on
//
// Parameters:
// - iso: An isomorphism between A and Option[A] that defines the conversion
//
// Returns:
// - A function that takes a Lens[S, A] and returns a LensO[S, A]
//
// Example:
//
// type Config struct {
// timeout int
// }
//
// // Create a lens to the timeout field
// timeoutLens := lens.MakeLens(
// func(c Config) int { return c.timeout },
// func(c Config, t int) Config { c.timeout = t; return c },
// )
//
// // Create an isomorphism that treats 0 as None
// zeroAsNone := iso.MakeIso(
// func(t int) option.Option[int] {
// if t == 0 {
// return option.None[int]()
// }
// return option.Some(t)
// },
// func(opt option.Option[int]) int {
// return option.GetOrElse(func() int { return 0 })(opt)
// },
// )
//
// // Convert to optional lens
// optTimeoutLens := FromIso[Config, int](zeroAsNone)(timeoutLens)
//
// config := Config{timeout: 0}
// opt := optTimeoutLens.Get(config) // None[int]()
// updated := optTimeoutLens.Set(option.Some(30))(config) // Config{timeout: 30}
//
// Common Use Cases:
// - Converting between sentinel values (like 0, -1, "") and Option
// - Applying custom validation logic when converting to/from Option
// - Integrating with existing isomorphisms like FromNillable
//
// See also:
// - FromPredicate: For predicate-based optional conversion
// - FromNillable: For pointer-based optional conversion
// - FromOption: For converting from optional to non-optional with defaults
//
//go:inline
func FromIso[S, A any](iso Iso[A, Option[A]]) func(Lens[S, A]) LensO[S, A] {
return LI.Compose[S](iso)
}


@@ -0,0 +1,479 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package option
import (
"testing"
EQT "github.com/IBM/fp-go/v2/eq/testing"
F "github.com/IBM/fp-go/v2/function"
ISO "github.com/IBM/fp-go/v2/optics/iso"
L "github.com/IBM/fp-go/v2/optics/lens"
LT "github.com/IBM/fp-go/v2/optics/lens/testing"
O "github.com/IBM/fp-go/v2/option"
"github.com/stretchr/testify/assert"
)
// Test types
type Config struct {
timeout int
retries int
}
type Settings struct {
maxConnections int
bufferSize int
}
// TestFromIsoBasic tests basic functionality of FromIso
func TestFromIsoBasic(t *testing.T) {
// Create an isomorphism that treats 0 as None
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
// Create a lens to the timeout field
timeoutLens := L.MakeLens(
func(c Config) int { return c.timeout },
func(c Config, t int) Config { c.timeout = t; return c },
)
// Convert to optional lens using FromIso
optTimeoutLens := FromIso[Config, int](zeroAsNone)(timeoutLens)
t.Run("GetNone", func(t *testing.T) {
config := Config{timeout: 0, retries: 3}
result := optTimeoutLens.Get(config)
assert.True(t, O.IsNone(result))
})
t.Run("GetSome", func(t *testing.T) {
config := Config{timeout: 30, retries: 3}
result := optTimeoutLens.Get(config)
assert.True(t, O.IsSome(result))
assert.Equal(t, 30, O.GetOrElse(F.Constant(0))(result))
})
t.Run("SetNone", func(t *testing.T) {
config := Config{timeout: 30, retries: 3}
updated := optTimeoutLens.Set(O.None[int]())(config)
assert.Equal(t, 0, updated.timeout)
assert.Equal(t, 3, updated.retries) // Other fields unchanged
})
t.Run("SetSome", func(t *testing.T) {
config := Config{timeout: 0, retries: 3}
updated := optTimeoutLens.Set(O.Some(60))(config)
assert.Equal(t, 60, updated.timeout)
assert.Equal(t, 3, updated.retries) // Other fields unchanged
})
t.Run("SetPreservesOriginal", func(t *testing.T) {
original := Config{timeout: 30, retries: 3}
_ = optTimeoutLens.Set(O.Some(60))(original)
// Original should be unchanged
assert.Equal(t, 30, original.timeout)
assert.Equal(t, 3, original.retries)
})
}
// TestFromIsoWithNegativeSentinel tests using -1 as a sentinel value
func TestFromIsoWithNegativeSentinel(t *testing.T) {
// Create an isomorphism that treats -1 as None
negativeOneAsNone := ISO.MakeIso(
func(n int) O.Option[int] {
if n == -1 {
return O.None[int]()
}
return O.Some(n)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(-1))(opt)
},
)
retriesLens := L.MakeLens(
func(c Config) int { return c.retries },
func(c Config, r int) Config { c.retries = r; return c },
)
optRetriesLens := FromIso[Config, int](negativeOneAsNone)(retriesLens)
t.Run("GetNoneForNegativeOne", func(t *testing.T) {
config := Config{timeout: 30, retries: -1}
result := optRetriesLens.Get(config)
assert.True(t, O.IsNone(result))
})
t.Run("GetSomeForZero", func(t *testing.T) {
config := Config{timeout: 30, retries: 0}
result := optRetriesLens.Get(config)
assert.True(t, O.IsSome(result))
assert.Equal(t, 0, O.GetOrElse(F.Constant(-1))(result))
})
t.Run("SetNoneToNegativeOne", func(t *testing.T) {
config := Config{timeout: 30, retries: 5}
updated := optRetriesLens.Set(O.None[int]())(config)
assert.Equal(t, -1, updated.retries)
})
}
// TestFromIsoLaws verifies that FromIso satisfies lens laws
func TestFromIsoLaws(t *testing.T) {
// Create an isomorphism
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
timeoutLens := L.MakeLens(
func(c Config) int { return c.timeout },
func(c Config, t int) Config { c.timeout = t; return c },
)
optTimeoutLens := FromIso[Config, int](zeroAsNone)(timeoutLens)
eqOptInt := O.Eq(EQT.Eq[int]())
eqConfig := EQT.Eq[Config]()
config := Config{timeout: 30, retries: 3}
newValue := O.Some(60)
// Law 1: GetSet - lens.Set(lens.Get(s))(s) == s
t.Run("GetSetLaw", func(t *testing.T) {
result := optTimeoutLens.Set(optTimeoutLens.Get(config))(config)
assert.True(t, eqConfig.Equals(config, result))
})
// Law 2: SetGet - lens.Get(lens.Set(a)(s)) == a
t.Run("SetGetLaw", func(t *testing.T) {
result := optTimeoutLens.Get(optTimeoutLens.Set(newValue)(config))
assert.True(t, eqOptInt.Equals(newValue, result))
})
// Law 3: SetSet - lens.Set(a2)(lens.Set(a1)(s)) == lens.Set(a2)(s)
t.Run("SetSetLaw", func(t *testing.T) {
a1 := O.Some(60)
a2 := O.None[int]()
result1 := optTimeoutLens.Set(a2)(optTimeoutLens.Set(a1)(config))
result2 := optTimeoutLens.Set(a2)(config)
assert.True(t, eqConfig.Equals(result1, result2))
})
// Use the testing helper to verify all laws
t.Run("AllLaws", func(t *testing.T) {
laws := LT.AssertLaws(t, eqOptInt, eqConfig)(optTimeoutLens)
assert.True(t, laws(config, O.Some(100)))
assert.True(t, laws(Config{timeout: 0, retries: 5}, O.None[int]()))
})
}
// TestFromIsoComposition tests composing FromIso with other lenses
func TestFromIsoComposition(t *testing.T) {
type Application struct {
config Config
}
// Isomorphism for zero as none
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
// Lens to config field
configLens := L.MakeLens(
func(a Application) Config { return a.config },
func(a Application, c Config) Application { a.config = c; return a },
)
// Lens to timeout field
timeoutLens := L.MakeLens(
func(c Config) int { return c.timeout },
func(c Config, t int) Config { c.timeout = t; return c },
)
// Compose: Application -> Config -> timeout (as Option)
optTimeoutFromConfig := FromIso[Config, int](zeroAsNone)(timeoutLens)
optTimeoutFromApp := F.Pipe1(
configLens,
L.Compose[Application](optTimeoutFromConfig),
)
app := Application{config: Config{timeout: 0, retries: 3}}
t.Run("ComposedGet", func(t *testing.T) {
result := optTimeoutFromApp.Get(app)
assert.True(t, O.IsNone(result))
})
t.Run("ComposedSet", func(t *testing.T) {
updated := optTimeoutFromApp.Set(O.Some(45))(app)
assert.Equal(t, 45, updated.config.timeout)
assert.Equal(t, 3, updated.config.retries)
})
}
// TestFromIsoModify tests using Modify with FromIso-based lenses
func TestFromIsoModify(t *testing.T) {
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
timeoutLens := L.MakeLens(
func(c Config) int { return c.timeout },
func(c Config, t int) Config { c.timeout = t; return c },
)
optTimeoutLens := FromIso[Config, int](zeroAsNone)(timeoutLens)
t.Run("ModifyNoneToSome", func(t *testing.T) {
config := Config{timeout: 0, retries: 3}
// Map None to Some(10)
modified := L.Modify[Config](O.Map(func(x int) int { return x + 10 }))(optTimeoutLens)(config)
// Since it was None, Map doesn't apply, stays None (0)
assert.Equal(t, 0, modified.timeout)
})
t.Run("ModifySomeValue", func(t *testing.T) {
config := Config{timeout: 30, retries: 3}
// Double the timeout value
modified := L.Modify[Config](O.Map(func(x int) int { return x * 2 }))(optTimeoutLens)(config)
assert.Equal(t, 60, modified.timeout)
})
t.Run("ModifyWithAlt", func(t *testing.T) {
config := Config{timeout: 0, retries: 3}
// Use Alt to provide a default
modified := L.Modify[Config](func(opt O.Option[int]) O.Option[int] {
return O.Alt(F.Constant(O.Some(10)))(opt)
})(optTimeoutLens)(config)
assert.Equal(t, 10, modified.timeout)
})
}
// TestFromIsoWithStringEmpty tests using empty string as None
func TestFromIsoWithStringEmpty(t *testing.T) {
type User struct {
name string
email string
}
// Isomorphism that treats empty string as None
emptyAsNone := ISO.MakeIso(
func(s string) O.Option[string] {
if s == "" {
return O.None[string]()
}
return O.Some(s)
},
func(opt O.Option[string]) string {
return O.GetOrElse(F.Constant(""))(opt)
},
)
emailLens := L.MakeLens(
func(u User) string { return u.email },
func(u User, e string) User { u.email = e; return u },
)
optEmailLens := FromIso[User, string](emptyAsNone)(emailLens)
t.Run("EmptyStringAsNone", func(t *testing.T) {
user := User{name: "Alice", email: ""}
result := optEmailLens.Get(user)
assert.True(t, O.IsNone(result))
})
t.Run("NonEmptyStringAsSome", func(t *testing.T) {
user := User{name: "Alice", email: "alice@example.com"}
result := optEmailLens.Get(user)
assert.True(t, O.IsSome(result))
assert.Equal(t, "alice@example.com", O.GetOrElse(F.Constant(""))(result))
})
t.Run("SetNoneToEmpty", func(t *testing.T) {
user := User{name: "Alice", email: "alice@example.com"}
updated := optEmailLens.Set(O.None[string]())(user)
assert.Equal(t, "", updated.email)
})
}
// TestFromIsoRoundTrip tests round-trip conversions
func TestFromIsoRoundTrip(t *testing.T) {
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
maxConnectionsLens := L.MakeLens(
func(s Settings) int { return s.maxConnections },
func(s Settings, m int) Settings { s.maxConnections = m; return s },
)
optMaxConnectionsLens := FromIso[Settings, int](zeroAsNone)(maxConnectionsLens)
t.Run("RoundTripThroughGet", func(t *testing.T) {
settings := Settings{maxConnections: 100, bufferSize: 1024}
// Get the value, then Set it back
opt := optMaxConnectionsLens.Get(settings)
restored := optMaxConnectionsLens.Set(opt)(settings)
assert.Equal(t, settings, restored)
})
t.Run("RoundTripThroughSet", func(t *testing.T) {
settings := Settings{maxConnections: 0, bufferSize: 1024}
// Set a new value, then Get it
newOpt := O.Some(200)
updated := optMaxConnectionsLens.Set(newOpt)(settings)
retrieved := optMaxConnectionsLens.Get(updated)
assert.True(t, O.Eq(EQT.Eq[int]()).Equals(newOpt, retrieved))
})
t.Run("RoundTripWithNone", func(t *testing.T) {
settings := Settings{maxConnections: 100, bufferSize: 1024}
// Set None, then get it back
updated := optMaxConnectionsLens.Set(O.None[int]())(settings)
retrieved := optMaxConnectionsLens.Get(updated)
assert.True(t, O.IsNone(retrieved))
})
}
// TestFromIsoChaining tests chaining multiple FromIso transformations
func TestFromIsoChaining(t *testing.T) {
// Create two different isomorphisms
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
timeoutLens := L.MakeLens(
func(c Config) int { return c.timeout },
func(c Config, t int) Config { c.timeout = t; return c },
)
optTimeoutLens := FromIso[Config, int](zeroAsNone)(timeoutLens)
config := Config{timeout: 30, retries: 3}
t.Run("ChainedOperations", func(t *testing.T) {
// Chain multiple operations
result := F.Pipe2(
config,
optTimeoutLens.Set(O.Some(60)),
optTimeoutLens.Set(O.None[int]()),
)
assert.Equal(t, 0, result.timeout)
})
}
// TestFromIsoMultipleFields tests using FromIso on multiple fields
func TestFromIsoMultipleFields(t *testing.T) {
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
timeoutLens := L.MakeLens(
func(c Config) int { return c.timeout },
func(c Config, t int) Config { c.timeout = t; return c },
)
retriesLens := L.MakeLens(
func(c Config) int { return c.retries },
func(c Config, r int) Config { c.retries = r; return c },
)
optTimeoutLens := FromIso[Config, int](zeroAsNone)(timeoutLens)
optRetriesLens := FromIso[Config, int](zeroAsNone)(retriesLens)
t.Run("IndependentFields", func(t *testing.T) {
config := Config{timeout: 0, retries: 5}
// Get both fields
timeoutOpt := optTimeoutLens.Get(config)
retriesOpt := optRetriesLens.Get(config)
assert.True(t, O.IsNone(timeoutOpt))
assert.True(t, O.IsSome(retriesOpt))
assert.Equal(t, 5, O.GetOrElse(F.Constant(0))(retriesOpt))
})
t.Run("SetBothFields", func(t *testing.T) {
config := Config{timeout: 0, retries: 0}
// Set both fields
updated := F.Pipe2(
config,
optTimeoutLens.Set(O.Some(30)),
optRetriesLens.Set(O.Some(3)),
)
assert.Equal(t, 30, updated.timeout)
assert.Equal(t, 3, updated.retries)
})
}


@@ -16,7 +16,6 @@
package option
import (
L "github.com/IBM/fp-go/v2/optics/lens"
LG "github.com/IBM/fp-go/v2/optics/lens/generic"
T "github.com/IBM/fp-go/v2/optics/traversal/option"
O "github.com/IBM/fp-go/v2/option"
@@ -60,6 +59,6 @@ import (
// // Now can use traversal operations
// configs := []Config{{Timeout: O.Some(30)}, {Timeout: O.None[int]()}}
// // Apply operations across all configs using the traversal
func AsTraversal[S, A any]() func(L.Lens[S, A]) T.Traversal[S, A] {
func AsTraversal[S, A any]() func(Lens[S, A]) T.Traversal[S, A] {
return LG.AsTraversal[T.Traversal[S, A]](O.MonadMap[A, S])
}


@@ -17,8 +17,10 @@ package option
import (
"github.com/IBM/fp-go/v2/endomorphism"
"github.com/IBM/fp-go/v2/optics/iso"
"github.com/IBM/fp-go/v2/optics/lens"
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/reader"
)
type (
@@ -91,4 +93,9 @@ type (
// optLens := lens.FromNillableRef(timeoutLens)
// // optLens is a LensO[*Config, *int]
LensO[S, A any] = Lens[S, Option[A]]
Kleisli[S, A, B any] = reader.Reader[A, LensO[S, B]]
Operator[S, A, B any] = Kleisli[S, LensO[S, A], B]
Iso[S, A any] = iso.Iso[S, A]
)


@@ -80,4 +80,7 @@ type (
// with the focused value updated to a. The original structure is never modified.
Set func(a A) Endomorphism[S]
}
Kleisli[S, A, B any] = func(A) Lens[S, B]
Operator[S, A, B any] = Kleisli[S, Lens[S, A], B]
)

v2/optics/optional/doc.go

@@ -0,0 +1,477 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package optional provides optional optics for focusing on values that may not exist.
# Overview
An Optional is an optic that focuses on a subpart of a data structure that may or may not
be present. Unlike lenses which always focus on an existing field, optionals handle cases
where the target value might be absent, returning Option[A] instead of A.
Optionals are the bridge between lenses (which always succeed) and prisms (which may fail
to match). They combine aspects of both:
- Like lenses: Focus on a specific location in a structure
- Like prisms: The value at that location may not exist
Optionals are essential for:
- Working with nullable fields (pointers that may be nil)
- Accessing nested optional values
- Conditional updates based on value presence
- Safe navigation through potentially missing data
# Mathematical Foundation
An Optional[S, A] consists of two operations:
- GetOption: S → Option[A] (try to extract A from S, may return None)
- Set: A → S → S (update A in S, may be a no-op if value doesn't exist)
Optionals must satisfy the optional laws:
1. GetOptionSet: if GetOption(s) == Some(a), then GetOption(Set(a)(s)) == Some(a)
2. SetGetOption: if GetOption(s) == Some(a), then Set(a)(s) preserves other parts of s
3. SetSet: Set(a2)(Set(a1)(s)) == Set(a2)(s)
# Basic Usage
Creating an optional for a nullable field:
type Config struct {
Timeout *int
MaxSize *int
}
timeoutOptional := optional.MakeOptional(
func(c Config) option.Option[*int] {
return option.FromNillable(c.Timeout)
},
func(c Config, t *int) Config {
c.Timeout = t
return c
},
)
config := Config{Timeout: nil, MaxSize: ptr(100)}
// Get returns None for nil
timeout := timeoutOptional.GetOption(config) // None[*int]
// Set updates the value
newTimeout := 30
updated := timeoutOptional.Set(&newTimeout)(config)
// updated.Timeout points to 30
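As a quick, illustrative check of the optional laws listed above (a sketch only, reusing the Config type, the ptr helper and timeoutOptional from this example; it is not part of the package API):

    t := 30
    cfg := Config{Timeout: &t, MaxSize: ptr(100)}

    // Law 1 (GetOptionSet): GetOption(cfg) is Some(&t), and after setting &t
    // back the focus is still observable as Some(&t).
    roundTrip := timeoutOptional.GetOption(timeoutOptional.Set(&t)(cfg))
    // option.IsSome(roundTrip) == true

    // Law 3 (SetSet): only the last Set is visible.
    a1, a2 := 10, 20
    once := timeoutOptional.Set(&a2)(cfg)
    twice := timeoutOptional.Set(&a2)(timeoutOptional.Set(&a1)(cfg))
    // *once.Timeout == *twice.Timeout == 20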
# Working with Pointers
For pointer-based structures, use MakeOptionalRef which handles copying automatically:
type Database struct {
Host string
Port int
}
type Config struct {
Database *Database
}
dbOptional := optional.MakeOptionalRef(
func(c *Config) option.Option[*Database] {
return option.FromNillable(c.Database)
},
func(c *Config, db *Database) *Config {
c.Database = db
return c
},
)
config := &Config{Database: nil}
// Get returns None when database is nil
db := dbOptional.GetOption(config) // None[*Database]
// Set creates a new config with the database
newDB := &Database{Host: "localhost", Port: 5432}
updated := dbOptional.Set(newDB)(config)
// config.Database is still nil, updated.Database points to newDB
# Identity Optional
The identity optional focuses on the entire structure:
idOpt := optional.Id[Config]()
config := Config{Timeout: ptr(30)}
value := idOpt.GetOption(config) // Some(config)
updated := idOpt.Set(Config{Timeout: ptr(60)})(config)
# Composing Optionals
Optionals can be composed to navigate through nested optional structures:
type Address struct {
Street string
City string
}
type Person struct {
Name string
Address *Address
}
addressOpt := optional.MakeOptional(
func(p Person) option.Option[*Address] {
return option.FromNillable(p.Address)
},
func(p Person, a *Address) Person {
p.Address = a
return p
},
)
cityOpt := optional.MakeOptionalRef(
func(a *Address) option.Option[string] {
if a == nil {
return option.None[string]()
}
return option.Some(a.City)
},
func(a *Address, city string) *Address {
a.City = city
return a
},
)
// Compose to access city from person
personCityOpt := F.Pipe1(
addressOpt,
optional.Compose[Person, *Address, string](cityOpt),
)
person := Person{Name: "Alice", Address: nil}
// Get returns None when address is nil
city := personCityOpt.GetOption(person) // None[string]
// Set updates the city if address exists
withAddress := Person{
Name: "Alice",
Address: &Address{Street: "Main St", City: "NYC"},
}
updated := personCityOpt.Set("Boston")(withAddress)
// updated.Address.City == "Boston"
# From Predicate
Create an optional that only focuses on values satisfying a predicate:
type User struct {
Age int
}
ageOpt := optional.FromPredicate[User, int](
func(age int) bool { return age >= 18 },
)(
func(u User) int { return u.Age },
func(u User, age int) User {
u.Age = age
return u
},
)
adult := User{Age: 25}
age := ageOpt.GetOption(adult) // Some(25)
minor := User{Age: 15}
minorAge := ageOpt.GetOption(minor) // None[int]
// Set only works if predicate is satisfied
updated := ageOpt.Set(30)(adult) // Age becomes 30
unchanged := ageOpt.Set(30)(minor) // Age stays 15 (predicate fails)
# Modifying Values
Use ModifyOption to transform values that exist:
type Counter struct {
Value *int
}
valueOpt := optional.MakeOptional(
func(c Counter) option.Option[*int] {
return option.FromNillable(c.Value)
},
func(c Counter, v *int) Counter {
c.Value = v
return c
},
)
counter := Counter{Value: ptr(5)}
// Increment if value exists
incremented := F.Pipe2(
counter,
optional.ModifyOption[Counter, *int](func(v *int) *int {
newVal := *v + 1
return &newVal
})(valueOpt),
option.GetOrElse(F.Constant(counter)),
)
// incremented.Value points to 6
// No change if value is nil
nilCounter := Counter{Value: nil}
result := F.Pipe2(
nilCounter,
optional.ModifyOption[Counter, *int](func(v *int) *int {
newVal := *v + 1
return &newVal
})(valueOpt),
option.GetOrElse(F.Constant(nilCounter)),
)
// result.Value is still nil
# Bidirectional Mapping
Transform the focus type of an optional:
type Celsius float64
type Fahrenheit float64
type Weather struct {
Temperature *Celsius
}
tempCelsiusOpt := optional.MakeOptional(
func(w Weather) option.Option[*Celsius] {
return option.FromNillable(w.Temperature)
},
func(w Weather, t *Celsius) Weather {
w.Temperature = t
return w
},
)
// Create optional that works with Fahrenheit
tempFahrenheitOpt := F.Pipe1(
tempCelsiusOpt,
optional.IMap[Weather, *Celsius, *Fahrenheit](
func(c *Celsius) *Fahrenheit {
f := Fahrenheit(*c*9/5 + 32)
return &f
},
func(f *Fahrenheit) *Celsius {
c := Celsius((*f - 32) * 5 / 9)
return &c
},
),
)
celsius := Celsius(20)
weather := Weather{Temperature: &celsius}
tempF := tempFahrenheitOpt.GetOption(weather) // Some(68°F)
# Real-World Example: Configuration with Defaults
type DatabaseConfig struct {
Host string
Port int
Username string
Password string
}
type AppConfig struct {
Database *DatabaseConfig
Debug bool
}
dbOpt := optional.MakeOptional(
func(c AppConfig) option.Option[*DatabaseConfig] {
return option.FromNillable(c.Database)
},
func(c AppConfig, db *DatabaseConfig) AppConfig {
c.Database = db
return c
},
)
dbHostOpt := optional.MakeOptionalRef(
func(db *DatabaseConfig) option.Option[string] {
if db == nil {
return option.None[string]()
}
return option.Some(db.Host)
},
func(db *DatabaseConfig, host string) *DatabaseConfig {
db.Host = host
return db
},
)
// Compose to access database host
appDbHostOpt := F.Pipe1(
dbOpt,
optional.Compose[AppConfig, *DatabaseConfig, string](dbHostOpt),
)
config := AppConfig{Database: nil, Debug: true}
// Get returns None when database is not configured
host := appDbHostOpt.GetOption(config) // None[string]
// Set creates database if needed
withDB := AppConfig{
Database: &DatabaseConfig{Host: "localhost", Port: 5432},
Debug: true,
}
updated := appDbHostOpt.Set("prod.example.com")(withDB)
// updated.Database.Host == "prod.example.com"
# Real-World Example: Safe Navigation
type Company struct {
Name string
CEO *Person
}
type Person struct {
Name string
Address *Address
}
type Address struct {
City string
}
ceoOpt := optional.MakeOptional(
func(c Company) option.Option[*Person] {
return option.FromNillable(c.CEO)
},
func(c Company, p *Person) Company {
c.CEO = p
return c
},
)
addressOpt := optional.MakeOptionalRef(
func(p *Person) option.Option[*Address] {
return option.FromNillable(p.Address)
},
func(p *Person, a *Address) *Person {
p.Address = a
return p
},
)
cityOpt := optional.MakeOptionalRef(
func(a *Address) option.Option[string] {
if a == nil {
return option.None[string]()
}
return option.Some(a.City)
},
func(a *Address, city string) *Address {
a.City = city
return a
},
)
// Compose all optionals for safe navigation
ceoCityOpt := F.Pipe2(
ceoOpt,
optional.Compose[Company, *Person, *Address](addressOpt),
optional.Compose[Company, *Address, string](cityOpt),
)
company := Company{Name: "Acme Corp", CEO: nil}
// Safe navigation returns None at any missing level
city := ceoCityOpt.GetOption(company) // None[string]
# Optionals in the Optics Hierarchy
Optionals sit between lenses and traversals in the optics hierarchy:
Lens[S, A]  --> Optional[S, A] --> Traversal[S, A]
Prism[S, A] --> Optional[S, A]
This means:
- Every Lens can be converted to an Optional (value always exists; see the sketch after this list)
- Every Prism can be converted to an Optional (variant may not match)
- Every Optional can be converted to a Traversal (0 or 1 values)
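As a concrete illustration of the first point, a total lens can be wrapped by hand into an optional whose getter never fails. This is only a sketch: nameLens is a hypothetical Lens[Person, string] (with the usual Get and curried Set), and only MakeOptional plus the Person type from the examples above are assumed:

    // Sketch: viewing a total lens as an Optional.
    // nameLens is hypothetical: Get(Person) string and Set(string)(Person) Person.
    nameOpt := optional.MakeOptional(
        func(p Person) option.Option[string] {
            return option.Some(nameLens.Get(p)) // a lens can always read its focus
        },
        func(p Person, name string) Person {
            return nameLens.Set(name)(p) // delegate the update to the lens
        },
    )
    // nameOpt.GetOption(p) is always Some, so every optional operation applies.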
# Performance Considerations
Optionals are efficient:
- No reflection - all operations are type-safe at compile time
- Minimal allocations - optionals themselves are lightweight
- GetOption short-circuits on None
- Set operations create new copies (immutability)
For best performance:
- Use MakeOptionalRef for pointer structures to ensure proper copying
- Cache composed optionals rather than recomposing (see the sketch after this list)
- Consider batch operations when updating multiple optional values
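For the caching tip, the composed ceoCityOpt from the safe-navigation example above can simply be hoisted to a package-level variable so the composition is built once and reused (sketch, same optionals as above):

    // Compose once at package scope, reuse the cached optional everywhere.
    var companyCityOpt = F.Pipe2(
        ceoOpt,
        optional.Compose[Company, *Person, *Address](addressOpt),
        optional.Compose[Company, *Address, string](cityOpt),
    )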
# Type Safety
Optionals are fully type-safe:
- Compile-time type checking
- No runtime type assertions
- Generic type parameters ensure correctness
- Composition maintains type relationships
# Function Reference
Core Optional Creation:
- MakeOptional: Create an optional from getter and setter functions
- MakeOptionalRef: Create an optional for pointer-based structures
- Id: Create an identity optional
- IdRef: Create an identity optional for pointers
Composition:
- Compose: Compose two optionals
- ComposeRef: Compose optionals for pointer structures
Transformation:
- ModifyOption: Transform a value through an optional (returns Option[S])
- SetOption: Set a value through an optional (returns Option[S])
- IMap: Bidirectionally map an optional
- IChain: Bidirectionally map with optional results
- IChainAny: Map to/from any type
Predicate-Based:
- FromPredicate: Create optional from predicate
- FromPredicateRef: Create optional from predicate (ref version)
# Related Packages
- github.com/IBM/fp-go/v2/optics/lens: Lenses for fields that always exist
- github.com/IBM/fp-go/v2/optics/prism: Prisms for sum types
- github.com/IBM/fp-go/v2/optics/traversal: Traversals for multiple values
- github.com/IBM/fp-go/v2/option: Optional values
- github.com/IBM/fp-go/v2/endomorphism: Endomorphisms (A → A functions)
*/
package optional

v2/optics/traversal/doc.go

@@ -0,0 +1,493 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package traversal provides traversals - optics for focusing on multiple values simultaneously.
# Overview
A Traversal is an optic that focuses on zero or more values within a data structure,
allowing you to view, modify, or fold over multiple elements at once. Unlike lenses
which focus on a single field, or prisms which focus on one variant, traversals can
target collections, multiple fields, or any number of values.
Traversals are the most general optic and sit at the bottom of the optics hierarchy.
They are essential for:
- Working with collections (arrays, slices, maps)
- Batch operations on multiple fields
- Filtering and transforming multiple values
- Aggregating data from multiple sources
- Applying the same operation to all matching elements
# Mathematical Foundation
A Traversal[S, A] is defined using higher-kinded types and applicative functors.
In practical terms, it provides operations to:
- Modify: Apply a function to all focused values
- Set: Replace all focused values with a constant
- FoldMap: Map each value to a monoid and combine results
- GetAll: Collect all focused values into a list
Traversals must satisfy the traversal laws:
1. Identity: traverse(Identity, id) == Identity
2. Composition: traverse(Compose(F, G), f) == Compose(traverse(F, traverse(G, f)))
These laws ensure that traversals compose properly and behave consistently.
# Basic Usage
Creating a traversal for array elements:
import (
A "github.com/IBM/fp-go/v2/array"
T "github.com/IBM/fp-go/v2/optics/traversal"
TA "github.com/IBM/fp-go/v2/optics/traversal/array"
)
numbers := []int{1, 2, 3, 4, 5}
// Get all elements
all := T.GetAll(numbers)(TA.Traversal[int]())
// Result: [1, 2, 3, 4, 5]
// Modify all elements
doubled := F.Pipe2(
numbers,
TA.Traversal[int](),
T.Modify[[]int, int](func(n int) int { return n * 2 }),
)
// Result: [2, 4, 6, 8, 10]
// Set all elements to a constant
allTens := F.Pipe2(
numbers,
TA.Traversal[int](),
T.Set[[]int, int](10),
)
// Result: [10, 10, 10, 10, 10]
# Identity Traversal
The identity traversal focuses on the entire structure:
idTrav := T.Id[int, int]()
value := 42
result := T.Modify[int, int](func(n int) int { return n * 2 })(idTrav)(value)
// Result: 84
# Folding with Traversals
Aggregate values using monoids:
import (
M "github.com/IBM/fp-go/v2/monoid"
N "github.com/IBM/fp-go/v2/number"
)
numbers := []int{1, 2, 3, 4, 5}
// Sum all elements
sum := F.Pipe2(
numbers,
TA.Traversal[int](),
T.FoldMap[int, []int, int](F.Identity[int]),
)(N.MonoidSum[int]())
// Result: 15
// Product of all elements
product := F.Pipe2(
numbers,
TA.Traversal[int](),
T.FoldMap[int, []int, int](F.Identity[int]),
)(N.MonoidProduct[int]())
// Result: 120
# Composing Traversals
Traversals can be composed to focus on nested collections:
type Person struct {
Name string
Friends []string
}
people := []Person{
{Name: "Alice", Friends: []string{"Bob", "Charlie"}},
{Name: "Bob", Friends: []string{"Alice", "David"}},
}
// Traversal for people array
peopleTrav := TA.Traversal[Person]()
// Traversal for friends array within a person
friendsTrav := T.MakeTraversal(func(p Person) []string {
return p.Friends
})
// Compose to access all friends of all people
allFriendsTrav := F.Pipe1(
peopleTrav,
T.Compose[[]Person, Person, string, ...](friendsTrav),
)
// Get all friends
allFriends := T.GetAll(people)(allFriendsTrav)
// Result: ["Bob", "Charlie", "Alice", "David"]
# Working with Records (Maps)
Traverse over map values:
import TR "github.com/IBM/fp-go/v2/optics/traversal/record"
scores := map[string]int{
"Alice": 85,
"Bob": 92,
"Charlie": 78,
}
// Get all scores
allScores := F.Pipe2(
scores,
TR.Traversal[string, int](),
T.GetAll[map[string]int, int],
)
// Result: [85, 92, 78] (order may vary)
// Increase all scores by 5
boosted := F.Pipe2(
scores,
TR.Traversal[string, int](),
T.Modify[map[string]int, int](func(score int) int {
return score + 5
}),
)
// Result: {"Alice": 90, "Bob": 97, "Charlie": 83}
# Working with Either Types
Traverse over the Right values:
import (
E "github.com/IBM/fp-go/v2/either"
TE "github.com/IBM/fp-go/v2/optics/traversal/either"
)
results := []E.Either[string, int]{
E.Right[string](10),
E.Left[int]("error"),
E.Right[string](20),
}
// Traversal for array of Either
arrayTrav := TA.Traversal[E.Either[string, int]]()
// Traversal for Right values
rightTrav := TE.Traversal[string, int]()
// Compose to access all Right values
allRightsTrav := F.Pipe1(
arrayTrav,
T.Compose[[]E.Either[string, int], E.Either[string, int], int, ...](rightTrav),
)
// Get all Right values
rights := T.GetAll(results)(allRightsTrav)
// Result: [10, 20]
// Double all Right values
doubled := F.Pipe2(
results,
allRightsTrav,
T.Modify[[]E.Either[string, int], int](func(n int) int { return n * 2 }),
)
// Result: [Right(20), Left("error"), Right(40)]
# Working with Option Types
Traverse over Some values:
import (
O "github.com/IBM/fp-go/v2/option"
TO "github.com/IBM/fp-go/v2/optics/traversal/option"
)
values := []O.Option[int]{
O.Some(1),
O.None[int](),
O.Some(2),
O.None[int](),
O.Some(3),
}
// Compose array and option traversals
allSomesTrav := F.Pipe1(
TA.Traversal[O.Option[int]](),
T.Compose[[]O.Option[int], O.Option[int], int, ...](TO.Traversal[int]()),
)
// Get all Some values
somes := T.GetAll(values)(allSomesTrav)
// Result: [1, 2, 3]
// Increment all Some values
incremented := F.Pipe2(
values,
allSomesTrav,
T.Modify[[]O.Option[int], int](func(n int) int { return n + 1 }),
)
// Result: [Some(2), None, Some(3), None, Some(4)]
# Real-World Example: Nested Data Structures
type Department struct {
Name string
Employees []Employee
}
type Employee struct {
Name string
Salary int
}
company := []Department{
{
Name: "Engineering",
Employees: []Employee{
{Name: "Alice", Salary: 100000},
{Name: "Bob", Salary: 95000},
},
},
{
Name: "Sales",
Employees: []Employee{
{Name: "Charlie", Salary: 80000},
{Name: "David", Salary: 85000},
},
},
}
// Traversal for departments
deptTrav := TA.Traversal[Department]()
// Traversal for employees within a department
empTrav := T.MakeTraversal(func(d Department) []Employee {
return d.Employees
})
// Traversal for employee array
empArrayTrav := TA.Traversal[Employee]()
// Compose to access all employees
allEmpTrav := F.Pipe2(
deptTrav,
T.Compose[[]Department, Department, []Employee, ...](empTrav),
T.Compose[[]Department, []Employee, Employee, ...](empArrayTrav),
)
// Get all employee names
names := F.Pipe2(
company,
allEmpTrav,
T.FoldMap[[]string, []Department, Employee](func(e Employee) []string {
return []string{e.Name}
}),
)(A.Monoid[string]())
// Result: ["Alice", "Bob", "Charlie", "David"]
// Give everyone a 10% raise
withRaises := F.Pipe2(
company,
allEmpTrav,
T.Modify[[]Department, Employee](func(e Employee) Employee {
e.Salary = int(float64(e.Salary) * 1.1)
return e
}),
)
# Real-World Example: Filtering with Traversals
type Product struct {
Name string
Price float64
InStock bool
}
products := []Product{
{Name: "Laptop", Price: 999.99, InStock: true},
{Name: "Mouse", Price: 29.99, InStock: false},
{Name: "Keyboard", Price: 79.99, InStock: true},
}
// Create a traversal that only focuses on in-stock products
inStockTrav := T.MakeTraversal(func(ps []Product) []Product {
return A.Filter(func(p Product) bool {
return p.InStock
})(ps)
})
// Apply discount to in-stock items
discounted := F.Pipe2(
products,
inStockTrav,
T.Modify[[]Product, Product](func(p Product) Product {
p.Price = p.Price * 0.9
return p
}),
)
// Only Laptop and Keyboard prices are reduced
# Real-World Example: Data Aggregation
type Order struct {
ID string
Items []OrderItem
Status string
}
type OrderItem struct {
Product string
Quantity int
Price float64
}
orders := []Order{
{
ID: "001",
Items: []OrderItem{
{Product: "Widget", Quantity: 2, Price: 10.0},
{Product: "Gadget", Quantity: 1, Price: 25.0},
},
Status: "completed",
},
{
ID: "002",
Items: []OrderItem{
{Product: "Widget", Quantity: 5, Price: 10.0},
},
Status: "completed",
},
}
// Traversal for orders
orderTrav := TA.Traversal[Order]()
// Traversal for items within an order
itemsTrav := T.MakeTraversal(func(o Order) []OrderItem {
return o.Items
})
// Traversal for item array
itemArrayTrav := TA.Traversal[OrderItem]()
// Compose to access all items
allItemsTrav := F.Pipe2(
orderTrav,
T.Compose[[]Order, Order, []OrderItem, ...](itemsTrav),
T.Compose[[]Order, []OrderItem, OrderItem, ...](itemArrayTrav),
)
// Calculate total revenue
totalRevenue := F.Pipe2(
orders,
allItemsTrav,
T.FoldMap[float64, []Order, OrderItem](func(item OrderItem) float64 {
return float64(item.Quantity) * item.Price
}),
)(N.MonoidSum[float64]())
// Result: 95.0 (2*10 + 1*25 + 5*10)
# Traversals in the Optics Hierarchy
Traversals are the most general optic:
Iso[S, A]
    Lens[S, A]
        Optional[S, A]
            Traversal[S, A]
    Prism[S, A]
        Optional[S, A]
            Traversal[S, A]
This means:
- Every Iso, Lens, Prism, and Optional can be converted to a Traversal
- Traversals are the most flexible but least specific optic
- Use more specific optics when possible for better type safety
# Performance Considerations
Traversals can be efficient but consider:
- Each traversal operation may iterate over all elements
- Composition creates nested iterations
- FoldMap is often more efficient than GetAll followed by reduction
- Modify creates new copies (immutability)
For best performance:
- Use specialized traversals (array, record, etc.) when available
- Avoid unnecessary composition
- Consider batch operations
- Cache composed traversals
# Type Safety
Traversals are fully type-safe:
- Compile-time type checking
- Generic type parameters ensure correctness
- Composition maintains type relationships
- No runtime type assertions
# Function Reference
Core Functions:
- Id: Create an identity traversal
- Modify: Apply a function to all focused values
- Set: Replace all focused values with a constant
- Compose: Compose two traversals
Aggregation:
- FoldMap: Map each value to a monoid and combine
- Fold: Fold over all values using a monoid
- GetAll: Collect all focused values into a list
# Specialized Traversals
The package includes specialized sub-packages for common patterns:
- array: Traversals for arrays and slices
- record: Traversals for maps
- either: Traversals for Either types
- option: Traversals for Option types
Each specialized package provides optimized implementations for its data type.
# Related Packages
- github.com/IBM/fp-go/v2/optics/lens: Lenses for single fields
- github.com/IBM/fp-go/v2/optics/prism: Prisms for sum types
- github.com/IBM/fp-go/v2/optics/optional: Optionals for maybe values
- github.com/IBM/fp-go/v2/optics/traversal/array: Array traversals
- github.com/IBM/fp-go/v2/optics/traversal/record: Record/map traversals
- github.com/IBM/fp-go/v2/optics/traversal/either: Either traversals
- github.com/IBM/fp-go/v2/optics/traversal/option: Option traversals
- github.com/IBM/fp-go/v2/array: Array utilities
- github.com/IBM/fp-go/v2/monoid: Monoid type class
*/
package traversal

View File

@@ -278,7 +278,7 @@ func Local[A, R2, R1 any](f func(R2) R1) func(Reader[R1, A]) Reader[R2, A] {
// getPort := reader.Asks(func(c Config) int { return c.Port })
// run := reader.Read[int](Config{Port: 8080})
// port := run(getPort) // 8080
func Read[E, A any](e E) func(Reader[E, A]) A {
func Read[A, E any](e E) func(Reader[E, A]) A {
return I.Ap[A](e)
}

View File

@@ -175,7 +175,7 @@ func TestLocal(t *testing.T) {
func TestRead(t *testing.T) {
config := Config{Port: 8080}
getPort := Asks(func(c Config) int { return c.Port })
run := Read[Config, int](config)
run := Read[int](config)
port := run(getPort)
assert.Equal(t, 8080, port)
}

View File

@@ -153,7 +153,7 @@ func Local[E, A, R2, R1 any](f func(R2) R1) func(ReaderEither[R1, E, A]) ReaderE
// Read applies a context to a reader to obtain its value
func Read[E1, A, E any](e E) func(ReaderEither[E, E1, A]) Either[E1, A] {
return reader.Read[E, Either[E1, A]](e)
return reader.Read[Either[E1, A]](e)
}
func MonadFlap[L, E, A, B any](fab ReaderEither[L, E, func(A) B], a A) ReaderEither[L, E, B] {

View File

@@ -750,3 +750,8 @@ func MapLeft[R, A, E1, E2 any](f func(E1) E2) func(ReaderIOEither[R, E1, A]) Rea
func Local[E, A, R1, R2 any](f func(R2) R1) func(ReaderIOEither[R1, E, A]) ReaderIOEither[R2, E, A] {
return reader.Local[IOEither[E, A]](f)
}
//go:inline
func Read[E, A, R any](r R) func(ReaderIOEither[R, E, A]) IOEither[E, A] {
return reader.Read[IOEither[E, A]](r)
}
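A minimal usage sketch for the new Read helper; cfg and rio are hypothetical (cfg an environment value of some type Config, rio a ReaderIOEither[Config, error, int]) and are not part of this changeset:
// Read fixes the environment and exposes the underlying IOEither
ioe := Read[error, int](cfg)(rio)
// ioe is an IOEither[error, int]; running it executes the effect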

View File

@@ -56,5 +56,3 @@ func TestApS(t *testing.T) {
assert.Equal(t, res(context.Background())(), result.Of("John Doe"))
}
// Made with Bob

View File

@@ -677,3 +677,8 @@ func MapLeft[R, A, E any](f func(error) E) func(ReaderIOResult[R, A]) RIOE.Reade
func Local[A, R1, R2 any](f func(R2) R1) func(ReaderIOResult[R1, A]) ReaderIOResult[R2, A] {
return RIOE.Local[error, A](f)
}
//go:inline
func Read[A, R any](r R) func(ReaderIOResult[R, A]) IOResult[A] {
return RIOE.Read[error, A](r)
}
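The ReaderIOResult variant behaves the same way with the error type fixed to error; a hypothetical sketch (cfg and rr are assumed values):
res := Read[int](cfg)(rr) // rr: ReaderIOResult[Config, int], res: IOResult[int]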

View File

@@ -160,5 +160,3 @@ func TestUncurry3(t *testing.T) {
assert.True(t, ok)
assert.Equal(t, 42, result)
}
// Made with Bob

View File

@@ -110,5 +110,3 @@ func TestFrom3(t *testing.T) {
result2 := roFunc("test", 5, false)(ctx1)
assert.Equal(t, O.None[string](), result2)
}
// Made with Bob

View File

@@ -82,11 +82,11 @@ func FromPredicate[GEA ~func(E) O.Option[A], E, A any](pred func(A) bool) func(A
}
func Fold[GEA ~func(E) O.Option[A], GB ~func(E) B, E, A, B any](onNone func() GB, onRight func(A) GB) func(GEA) GB {
return optiont.MatchE(R.MonadChain[GEA, GB, E, O.Option[A], B], onNone, onRight)
return optiont.MatchE(R.Chain[GEA, GB, E, O.Option[A], B], onNone, onRight)
}
func GetOrElse[GEA ~func(E) O.Option[A], GA ~func(E) A, E, A any](onNone func() GA) func(GEA) GA {
return optiont.GetOrElse(R.MonadChain[GEA, GA, E, O.Option[A], A], onNone, R.Of[GA, E, A])
return optiont.GetOrElse(R.Chain[GEA, GA, E, O.Option[A], A], onNone, R.Of[GA, E, A])
}
func Ask[GEE ~func(E) O.Option[E], E, L any]() GEE {

View File

@@ -36,6 +36,8 @@ func FromOption[E, A any](e Option[A]) ReaderOption[E, A] {
// Some wraps a value in a ReaderOption, representing a successful computation.
// This is equivalent to Of but more explicit about the Option semantics.
//
//go:inline
func Some[E, A any](r A) ReaderOption[E, A] {
return optiont.Of(reader.Of[E, Option[A]], r)
}
@@ -50,6 +52,8 @@ func FromReader[E, A any](r Reader[E, A]) ReaderOption[E, A] {
// SomeReader lifts a Reader[E, A] into a ReaderOption[E, A].
// The resulting computation always succeeds (returns Some).
//
//go:inline
func SomeReader[E, A any](r Reader[E, A]) ReaderOption[E, A] {
return optiont.SomeF(reader.MonadMap[E, A, Option[A]], r)
}
@@ -61,6 +65,8 @@ func SomeReader[E, A any](r Reader[E, A]) ReaderOption[E, A] {
//
// ro := readeroption.Of[Config](42)
// doubled := readeroption.MonadMap(ro, func(x int) int { return x * 2 })
//
//go:inline
func MonadMap[E, A, B any](fa ReaderOption[E, A], f func(A) B) ReaderOption[E, B] {
return readert.MonadMap[ReaderOption[E, A], ReaderOption[E, B]](O.MonadMap[A, B], fa, f)
}
@@ -74,6 +80,8 @@ func MonadMap[E, A, B any](fa ReaderOption[E, A], f func(A) B) ReaderOption[E, B
// readeroption.Of[Config](42),
// readeroption.Map[Config](func(x int) int { return x * 2 }),
// )
//
//go:inline
func Map[E, A, B any](f func(A) B) Operator[E, A, B] {
return readert.Map[ReaderOption[E, A], ReaderOption[E, B]](O.Map[A, B], f)
}
@@ -86,6 +94,8 @@ func Map[E, A, B any](f func(A) B) Operator[E, A, B] {
// findUser := func(id int) readeroption.ReaderOption[DB, User] { ... }
// loadProfile := func(user User) readeroption.ReaderOption[DB, Profile] { ... }
// result := readeroption.MonadChain(findUser(123), loadProfile)
//
//go:inline
func MonadChain[E, A, B any](ma ReaderOption[E, A], f Kleisli[E, A, B]) ReaderOption[E, B] {
return readert.MonadChain(O.MonadChain[A, B], ma, f)
}
@@ -99,6 +109,8 @@ func MonadChain[E, A, B any](ma ReaderOption[E, A], f Kleisli[E, A, B]) ReaderOp
// findUser(123),
// readeroption.Chain(loadProfile),
// )
//
//go:inline
func Chain[E, A, B any](f Kleisli[E, A, B]) Operator[E, A, B] {
return readert.Chain[ReaderOption[E, A]](O.Chain[A, B], f)
}
@@ -110,6 +122,8 @@ func Chain[E, A, B any](f Kleisli[E, A, B]) Operator[E, A, B] {
//
// ro := readeroption.Of[Config](42)
// result := ro(config) // Returns option.Some(42)
//
//go:inline
func Of[E, A any](a A) ReaderOption[E, A] {
return readert.MonadOf[ReaderOption[E, A]](O.Of[A], a)
}
@@ -121,6 +135,8 @@ func Of[E, A any](a A) ReaderOption[E, A] {
//
// ro := readeroption.None[Config, int]()
// result := ro(config) // Returns option.None[int]()
//
//go:inline
func None[E, A any]() ReaderOption[E, A] {
return reader.Of[E](O.None[A]())
}
@@ -128,12 +144,16 @@ func None[E, A any]() ReaderOption[E, A] {
// MonadAp applies a function wrapped in a ReaderOption to a value wrapped in a ReaderOption.
// Both computations are executed with the same environment.
// If either computation returns None, the result is None.
//
//go:inline
func MonadAp[E, A, B any](fab ReaderOption[E, func(A) B], fa ReaderOption[E, A]) ReaderOption[E, B] {
return readert.MonadAp[ReaderOption[E, A], ReaderOption[E, B], ReaderOption[E, func(A) B], E, A](O.MonadAp[B, A], fab, fa)
}
// Ap returns a function that applies a function wrapped in a ReaderOption to a value.
// This is the curried version of MonadAp.
//
//go:inline
func Ap[B, E, A any](fa ReaderOption[E, A]) Operator[E, func(A) B, B] {
return readert.Ap[ReaderOption[E, A], ReaderOption[E, B], ReaderOption[E, func(A) B], E, A](O.Ap[B, A], fa)
}
@@ -148,6 +168,8 @@ func Ap[B, E, A any](fa ReaderOption[E, A]) Operator[E, func(A) B, B] {
// readeroption.Of[Config](42),
// readeroption.Chain(isPositive),
// )
//
//go:inline
func FromPredicate[E, A any](pred func(A) bool) Kleisli[E, A, A] {
return fromoption.FromPredicate(FromOption[E, A], pred)
}
@@ -162,8 +184,14 @@ func FromPredicate[E, A any](pred func(A) bool) Kleisli[E, A, A] {
// reader.Of[Config]("not found"),
// func(user User) reader.Reader[Config, string] { return reader.Of[Config](user.Name) },
// )(findUser(123))
func Fold[E, A, B any](onNone func() Reader[E, B], onRight func(A) Reader[E, B]) func(ReaderOption[E, A]) Reader[E, B] {
return optiont.MatchE(reader.MonadChain[E, Option[A], B], onNone, onRight)
//
//go:inline
func Fold[E, A, B any](onNone Reader[E, B], onRight func(A) Reader[E, B]) func(ReaderOption[E, A]) Reader[E, B] {
return optiont.MatchE(reader.Chain[E, Option[A], B], function.Constant(onNone), onRight)
}
func MonadFold[E, A, B any](fa ReaderOption[E, A], onNone Reader[E, B], onRight func(A) Reader[E, B]) Reader[E, B] {
return optiont.MonadMatchE(fa, reader.MonadChain[E, Option[A], B], function.Constant(onNone), onRight)
}
// GetOrElse returns the value from a ReaderOption, or a default value if it's None.
@@ -173,8 +201,10 @@ func Fold[E, A, B any](onNone func() Reader[E, B], onRight func(A) Reader[E, B])
// result := readeroption.GetOrElse(
// reader.Of[Config](defaultUser),
// )(findUser(123))
func GetOrElse[E, A any](onNone func() Reader[E, A]) func(ReaderOption[E, A]) Reader[E, A] {
return optiont.GetOrElse(reader.MonadChain[E, Option[A], A], onNone, reader.Of[E, A])
//
//go:inline
func GetOrElse[E, A any](onNone Reader[E, A]) func(ReaderOption[E, A]) Reader[E, A] {
return optiont.GetOrElse(reader.Chain[E, Option[A], A], function.Constant(onNone), reader.Of[E, A])
}
// Ask retrieves the current environment as a ReaderOption.
@@ -184,6 +214,8 @@ func GetOrElse[E, A any](onNone func() Reader[E, A]) func(ReaderOption[E, A]) Re
//
// getConfig := readeroption.Ask[Config, any]()
// result := getConfig(myConfig) // Returns option.Some(myConfig)
//
//go:inline
func Ask[E, L any]() ReaderOption[E, E] {
return fromreader.Ask(FromReader[E, E])()
}
@@ -195,6 +227,8 @@ func Ask[E, L any]() ReaderOption[E, E] {
//
// getTimeout := readeroption.Asks(func(cfg Config) int { return cfg.Timeout })
// result := getTimeout(myConfig) // Returns option.Some(myConfig.Timeout)
//
//go:inline
func Asks[E, A any](r Reader[E, A]) ReaderOption[E, A] {
return fromreader.Asks(FromReader[E, A])(r)
}
@@ -209,6 +243,8 @@ func Asks[E, A any](r Reader[E, A]) ReaderOption[E, A] {
// readeroption.Of[Config]("25"),
// parseAge,
// )
//
//go:inline
func MonadChainOptionK[E, A, B any](ma ReaderOption[E, A], f func(A) Option[B]) ReaderOption[E, B] {
return fromoption.MonadChainOptionK(
MonadChain[E, A, B],
@@ -228,6 +264,8 @@ func MonadChainOptionK[E, A, B any](ma ReaderOption[E, A], f func(A) Option[B])
// readeroption.Of[Config]("25"),
// readeroption.ChainOptionK[Config](parseAge),
// )
//
//go:inline
func ChainOptionK[E, A, B any](f func(A) Option[B]) Operator[E, A, B] {
return fromoption.ChainOptionK(
Chain[E, A, B],
@@ -243,6 +281,8 @@ func ChainOptionK[E, A, B any](f func(A) Option[B]) Operator[E, A, B] {
//
// nested := readeroption.Of[Config](readeroption.Of[Config](42))
// flattened := readeroption.Flatten(nested)
//
//go:inline
func Flatten[E, A any](mma ReaderOption[E, ReaderOption[E, A]]) ReaderOption[E, A] {
return MonadChain(mma, function.Identity[ReaderOption[E, A]])
}
@@ -264,6 +304,8 @@ func Flatten[E, A any](mma ReaderOption[E, ReaderOption[E, A]]) ReaderOption[E,
// result := readeroption.Local(func(g GlobalConfig) DBConfig { return g.DB })(
// readeroption.Asks(query),
// )
//
//go:inline
func Local[A, R2, R1 any](f func(R2) R1) func(ReaderOption[R1, A]) ReaderOption[R2, A] {
return reader.Local[Option[A]](f)
}
@@ -275,18 +317,34 @@ func Local[A, R2, R1 any](f func(R2) R1) func(ReaderOption[R1, A]) ReaderOption[
//
// ro := readeroption.Of[Config](42)
// result := readeroption.Read[int](myConfig)(ro) // Returns option.Some(42)
//
//go:inline
func Read[A, E any](e E) func(ReaderOption[E, A]) Option[A] {
return reader.Read[E, Option[A]](e)
return reader.Read[Option[A]](e)
}
// MonadFlap applies a value to a function wrapped in a ReaderOption.
// This is the reverse of MonadAp.
//
//go:inline
func MonadFlap[E, A, B any](fab ReaderOption[E, func(A) B], a A) ReaderOption[E, B] {
return functor.MonadFlap(MonadMap[E, func(A) B, B], fab, a)
}
// Flap returns a function that applies a value to a function wrapped in a ReaderOption.
// This is the curried version of MonadFlap.
//
//go:inline
func Flap[E, B, A any](a A) Operator[E, func(A) B, B] {
return functor.Flap(Map[E, func(A) B, B], a)
}
//go:inline
func MonadAlt[E, A any](fa ReaderOption[E, A], that ReaderOption[E, A]) ReaderOption[E, A] {
return MonadFold(fa, that, Of[E, A])
}
//go:inline
func Alt[E, A any](that ReaderOption[E, A]) Operator[E, A, A] {
return Fold(that, Of[E, A])
}
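A short sketch of the new Alt combinator used as a fallback; primary, fallback, and cfg are hypothetical values introduced only for illustration:
// primary, fallback: ReaderOption[Config, int]
result := Alt(fallback)(primary)
// result(cfg) is primary(cfg) when that is Some, otherwise fallback(cfg)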

View File

@@ -22,6 +22,7 @@ import (
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/internal/utils"
O "github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/reader"
"github.com/stretchr/testify/assert"
)
@@ -135,15 +136,9 @@ func TestFromPredicate(t *testing.T) {
}
func TestFold(t *testing.T) {
onNone := func() Reader[MyContext, string] {
return func(ctx MyContext) string {
return "none"
}
}
onNone := reader.Of[MyContext]("none")
onSome := func(x int) Reader[MyContext, string] {
return func(ctx MyContext) string {
return fmt.Sprintf("%d", x)
}
return reader.Of[MyContext](fmt.Sprintf("%d", x))
}
// Test with Some
@@ -156,11 +151,7 @@ func TestFold(t *testing.T) {
}
func TestGetOrElse(t *testing.T) {
defaultValue := func() Reader[MyContext, int] {
return func(ctx MyContext) int {
return 0
}
}
defaultValue := reader.Of[MyContext](0)
// Test with Some
g1 := GetOrElse(defaultValue)(Of[MyContext](42))

View File

@@ -28,60 +28,70 @@ import "github.com/IBM/fp-go/v2/either"
// }
// variadicSum := either.Variadic0(sum)
// result := variadicSum(1, 2, 3) // Right(6)
//
//go:inline
func Variadic0[V, R any](f func([]V) (R, error)) func(...V) Result[R] {
return either.Variadic0(f)
}
// Variadic1 converts a function with 1 fixed parameter and a slice into a variadic function returning Either.
//
//go:inline
func Variadic1[T1, V, R any](f func(T1, []V) (R, error)) func(T1, ...V) Result[R] {
return either.Variadic1(f)
}
// Variadic2 converts a function with 2 fixed parameters and a slice into a variadic function returning Either.
//
//go:inline
func Variadic2[T1, T2, V, R any](f func(T1, T2, []V) (R, error)) func(T1, T2, ...V) Result[R] {
return either.Variadic2(f)
}
// Variadic3 converts a function with 3 fixed parameters and a slice into a variadic function returning Either.
//
//go:inline
func Variadic3[T1, T2, T3, V, R any](f func(T1, T2, T3, []V) (R, error)) func(T1, T2, T3, ...V) Result[R] {
return either.Variadic3(f)
}
// Variadic4 converts a function with 4 fixed parameters and a slice into a variadic function returning Either.
//
//go:inline
func Variadic4[T1, T2, T3, T4, V, R any](f func(T1, T2, T3, T4, []V) (R, error)) func(T1, T2, T3, T4, ...V) Result[R] {
return either.Variadic4(f)
}
// Unvariadic0 converts a variadic function returning (R, error) into a function taking a slice and returning Either.
//
//go:inline
func Unvariadic0[V, R any](f func(...V) (R, error)) func([]V) Result[R] {
return either.Unvariadic0(f)
}
// Unvariadic1 converts a variadic function with 1 fixed parameter into a function taking a slice and returning Either.
//
//go:inline
func Unvariadic1[T1, V, R any](f func(T1, ...V) (R, error)) func(T1, []V) Result[R] {
return either.Unvariadic1(f)
}
// Unvariadic2 converts a variadic function with 2 fixed parameters into a function taking a slice and returning Either.
//
//go:inline
func Unvariadic2[T1, T2, V, R any](f func(T1, T2, ...V) (R, error)) func(T1, T2, []V) Result[R] {
return either.Unvariadic2(f)
}
// Unvariadic3 converts a variadic function with 3 fixed parameters into a function taking a slice and returning Either.
//
//go:inline
func Unvariadic3[T1, T2, T3, V, R any](f func(T1, T2, T3, ...V) (R, error)) func(T1, T2, T3, []V) Result[R] {
return either.Unvariadic3(f)
}
// Unvariadic4 converts a variadic function with 4 fixed parameters into a function taking a slice and returning Either.
//
//go:inline
func Unvariadic4[T1, T2, T3, T4, V, R any](f func(T1, T2, T3, T4, ...V) (R, error)) func(T1, T2, T3, T4, []V) Result[R] {
return either.Unvariadic4(f)

View File

@@ -17,6 +17,8 @@ package lens
import "github.com/IBM/fp-go/v2/optics/lens/option"
//go:generate go run ../../main.go lens --dir . --filename gen_lens.go
// fp-go:Lens
type Person struct {
Name string
@@ -45,8 +47,20 @@ type Company struct {
Website *string
}
// fp-go:Lens
type CompanyExtended struct {
Company
Extended string
}
// fp-go:Lens
type CheckOption struct {
Name option.Option[string]
Value string `json:",omitempty"`
}
// fp-go:Lens
type WithGeneric[T any] struct {
Name string
Value T
}

View File

@@ -20,6 +20,7 @@ import (
F "github.com/IBM/fp-go/v2/function"
L "github.com/IBM/fp-go/v2/optics/lens"
O "github.com/IBM/fp-go/v2/option"
"github.com/stretchr/testify/assert"
)
@@ -153,3 +154,190 @@ func TestLensComposition(t *testing.T) {
assert.Equal(t, 55, olderCEO.CEO.Age)
assert.Equal(t, 50, company.CEO.Age) // Original unchanged
}
func TestPersonRefLensesIdempotent(t *testing.T) {
// Create a person pointer
person := &Person{
Name: "Alice",
Age: 30,
Email: "alice@example.com",
}
// Create ref lenses
refLenses := MakePersonRefLenses()
// Test that setting the same value returns the identical pointer (idempotent)
// This works because Name, Age, and Email use MakeLensStrict, which includes an equality optimization
// Test Name field - setting same value should return same pointer
sameName := refLenses.Name.Set("Alice")(person)
assert.Same(t, person, sameName, "Setting Name to same value should return identical pointer")
// Test Age field - setting same value should return same pointer
sameAge := refLenses.Age.Set(30)(person)
assert.Same(t, person, sameAge, "Setting Age to same value should return identical pointer")
// Test Email field - setting same value should return same pointer
sameEmail := refLenses.Email.Set("alice@example.com")(person)
assert.Same(t, person, sameEmail, "Setting Email to same value should return identical pointer")
// Test that setting a different value creates a new pointer
differentName := refLenses.Name.Set("Bob")(person)
assert.NotSame(t, person, differentName, "Setting Name to different value should return new pointer")
assert.Equal(t, "Bob", differentName.Name)
assert.Equal(t, "Alice", person.Name, "Original should be unchanged")
differentAge := refLenses.Age.Set(31)(person)
assert.NotSame(t, person, differentAge, "Setting Age to different value should return new pointer")
assert.Equal(t, 31, differentAge.Age)
assert.Equal(t, 30, person.Age, "Original should be unchanged")
differentEmail := refLenses.Email.Set("bob@example.com")(person)
assert.NotSame(t, person, differentEmail, "Setting Email to different value should return new pointer")
assert.Equal(t, "bob@example.com", differentEmail.Email)
assert.Equal(t, "alice@example.com", person.Email, "Original should be unchanged")
}
func TestPersonRefLensesOptionalIdempotent(t *testing.T) {
// Test that setting an optional field to the same value returns the identical pointer
// This is important for performance and correctness in functional programming
// Test with Phone field set to a value
phoneValue := "555-1234"
person := &Person{
Name: "Alice",
Age: 30,
Email: "alice@example.com",
Phone: &phoneValue,
}
refLenses := MakePersonRefLenses()
// Test that setting Phone to the same value returns the same pointer
samePhone := refLenses.PhoneO.Set(O.Some(&phoneValue))(person)
assert.Same(t, person, samePhone, "Setting Phone to same value should return identical pointer")
// Test with Phone field set to nil
personNoPhone := &Person{
Name: "Bob",
Age: 25,
Email: "bob@example.com",
Phone: nil,
}
// Setting Phone to None when it's already nil should return same pointer
sameNilPhone := refLenses.PhoneO.Set(O.None[*string]())(personNoPhone)
assert.Same(t, personNoPhone, sameNilPhone, "Setting Phone to None when already nil should return identical pointer")
// Test that setting to a different value creates a new pointer
newPhoneValue := "555-5678"
differentPhone := refLenses.PhoneO.Set(O.Some(&newPhoneValue))(person)
assert.NotSame(t, person, differentPhone, "Setting Phone to different value should return new pointer")
assert.Equal(t, &newPhoneValue, differentPhone.Phone)
assert.Equal(t, &phoneValue, person.Phone, "Original should be unchanged")
// Test setting from nil to Some creates new pointer
somePhone := refLenses.PhoneO.Set(O.Some(&phoneValue))(personNoPhone)
assert.NotSame(t, personNoPhone, somePhone, "Setting Phone from nil to Some should return new pointer")
assert.Equal(t, &phoneValue, somePhone.Phone)
assert.Nil(t, personNoPhone.Phone, "Original should be unchanged")
// Test setting from Some to None creates new pointer
nonePhone := refLenses.PhoneO.Set(O.None[*string]())(person)
assert.NotSame(t, person, nonePhone, "Setting Phone from Some to None should return new pointer")
assert.Nil(t, nonePhone.Phone)
assert.Equal(t, &phoneValue, person.Phone, "Original should be unchanged")
}
func TestAddressRefLensesOptionalIdempotent(t *testing.T) {
// Test Address.State optional field idempotency
stateValue := "California"
address := &Address{
Street: "123 Main St",
City: "Los Angeles",
ZipCode: "90001",
Country: "USA",
State: &stateValue,
}
refLenses := MakeAddressRefLenses()
// Test that setting State to the same value returns the same pointer
sameState := refLenses.StateO.Set(O.Some(&stateValue))(address)
assert.Same(t, address, sameState, "Setting State to same value should return identical pointer")
// Test with State field set to nil
addressNoState := &Address{
Street: "456 Oak Ave",
City: "Boston",
ZipCode: "02101",
Country: "USA",
State: nil,
}
// Setting State to None when it's already nil should return same pointer
sameNilState := refLenses.StateO.Set(O.None[*string]())(addressNoState)
assert.Same(t, addressNoState, sameNilState, "Setting State to None when already nil should return identical pointer")
// Test that setting to a different value creates a new pointer
newStateValue := "New York"
differentState := refLenses.StateO.Set(O.Some(&newStateValue))(address)
assert.NotSame(t, address, differentState, "Setting State to different value should return new pointer")
assert.Equal(t, &newStateValue, differentState.State)
assert.Equal(t, &stateValue, address.State, "Original should be unchanged")
}
func TestCompanyRefLensesOptionalIdempotent(t *testing.T) {
// Test Company.Website optional field idempotency
websiteValue := "https://example.com"
company := &Company{
Name: "Tech Inc",
Address: Address{
Street: "789 Tech Blvd",
City: "San Francisco",
ZipCode: "94102",
Country: "USA",
},
CEO: Person{
Name: "Jane Doe",
Age: 45,
Email: "jane@techinc.com",
},
Website: &websiteValue,
}
refLenses := MakeCompanyRefLenses()
// Test that setting Website to the same value returns the same pointer
sameWebsite := refLenses.WebsiteO.Set(O.Some(&websiteValue))(company)
assert.Same(t, company, sameWebsite, "Setting Website to same value should return identical pointer")
// Test with Website field set to nil
companyNoWebsite := &Company{
Name: "Startup LLC",
Address: Address{
Street: "101 Innovation Way",
City: "Austin",
ZipCode: "78701",
Country: "USA",
},
CEO: Person{
Name: "John Smith",
Age: 35,
Email: "john@startup.com",
},
}
// Setting Website to None when it's already nil should return same pointer
sameNilWebsite := refLenses.WebsiteO.Set(O.None[*string]())(companyNoWebsite)
assert.Same(t, companyNoWebsite, sameNilWebsite, "Setting Website to None when already nil should return identical pointer")
// Test that setting to a different value creates a new pointer
newWebsiteValue := "https://newsite.com"
differentWebsite := refLenses.WebsiteO.Set(O.Some(&newWebsiteValue))(company)
assert.NotSame(t, company, differentWebsite, "Setting Website to different value should return new pointer")
assert.Equal(t, &newWebsiteValue, differentWebsite.Website)
assert.Equal(t, &websiteValue, company.Website, "Original should be unchanged")
}

View File

@@ -2,250 +2,587 @@ package lens
// Code generated by go generate; DO NOT EDIT.
// This file was generated by robots at
// 2025-11-07 16:52:17.4935733 +0100 CET m=+0.003883901
// 2025-11-13 09:53:07.1489139 +0100 CET m=+0.005967001
import (
L "github.com/IBM/fp-go/v2/optics/lens"
LO "github.com/IBM/fp-go/v2/optics/lens/option"
O "github.com/IBM/fp-go/v2/option"
I "github.com/IBM/fp-go/v2/optics/iso/option"
IO "github.com/IBM/fp-go/v2/optics/iso/option"
option "github.com/IBM/fp-go/v2/optics/lens/option"
)
// PersonLenses provides lenses for accessing fields of Person
type PersonLenses struct {
// mandatory fields
Name L.Lens[Person, string]
Age L.Lens[Person, int]
Email L.Lens[Person, string]
Phone LO.LensO[Person, *string]
Phone L.Lens[Person, *string]
// optional fields
NameO LO.LensO[Person, string]
AgeO LO.LensO[Person, int]
EmailO LO.LensO[Person, string]
PhoneO LO.LensO[Person, *string]
}
// PersonRefLenses provides lenses for accessing fields of Person via a reference to Person
type PersonRefLenses struct {
// mandatory fields
Name L.Lens[*Person, string]
Age L.Lens[*Person, int]
Email L.Lens[*Person, string]
Phone LO.LensO[*Person, *string]
Phone L.Lens[*Person, *string]
// optional fields
NameO LO.LensO[*Person, string]
AgeO LO.LensO[*Person, int]
EmailO LO.LensO[*Person, string]
PhoneO LO.LensO[*Person, *string]
}
// MakePersonLenses creates a new PersonLenses with lenses for all fields
func MakePersonLenses() PersonLenses {
isoPhone := I.FromZero[*string]()
// mandatory lenses
lensName := L.MakeLens(
func(s Person) string { return s.Name },
func(s Person, v string) Person { s.Name = v; return s },
)
lensAge := L.MakeLens(
func(s Person) int { return s.Age },
func(s Person, v int) Person { s.Age = v; return s },
)
lensEmail := L.MakeLens(
func(s Person) string { return s.Email },
func(s Person, v string) Person { s.Email = v; return s },
)
lensPhone := L.MakeLens(
func(s Person) *string { return s.Phone },
func(s Person, v *string) Person { s.Phone = v; return s },
)
// optional lenses
lensNameO := LO.FromIso[Person](IO.FromZero[string]())(lensName)
lensAgeO := LO.FromIso[Person](IO.FromZero[int]())(lensAge)
lensEmailO := LO.FromIso[Person](IO.FromZero[string]())(lensEmail)
lensPhoneO := LO.FromIso[Person](IO.FromZero[*string]())(lensPhone)
return PersonLenses{
Name: L.MakeLens(
func(s Person) string { return s.Name },
func(s Person, v string) Person { s.Name = v; return s },
),
Age: L.MakeLens(
func(s Person) int { return s.Age },
func(s Person, v int) Person { s.Age = v; return s },
),
Email: L.MakeLens(
func(s Person) string { return s.Email },
func(s Person, v string) Person { s.Email = v; return s },
),
Phone: L.MakeLens(
func(s Person) O.Option[*string] { return isoPhone.Get(s.Phone) },
func(s Person, v O.Option[*string]) Person { s.Phone = isoPhone.ReverseGet(v); return s },
),
// mandatory lenses
Name: lensName,
Age: lensAge,
Email: lensEmail,
Phone: lensPhone,
// optional lenses
NameO: lensNameO,
AgeO: lensAgeO,
EmailO: lensEmailO,
PhoneO: lensPhoneO,
}
}
// MakePersonRefLenses creates a new PersonRefLenses with lenses for all fields
func MakePersonRefLenses() PersonRefLenses {
isoPhone := I.FromZero[*string]()
// mandatory lenses
lensName := L.MakeLensStrict(
func(s *Person) string { return s.Name },
func(s *Person, v string) *Person { s.Name = v; return s },
)
lensAge := L.MakeLensStrict(
func(s *Person) int { return s.Age },
func(s *Person, v int) *Person { s.Age = v; return s },
)
lensEmail := L.MakeLensStrict(
func(s *Person) string { return s.Email },
func(s *Person, v string) *Person { s.Email = v; return s },
)
lensPhone := L.MakeLensStrict(
func(s *Person) *string { return s.Phone },
func(s *Person, v *string) *Person { s.Phone = v; return s },
)
// optional lenses
lensNameO := LO.FromIso[*Person](IO.FromZero[string]())(lensName)
lensAgeO := LO.FromIso[*Person](IO.FromZero[int]())(lensAge)
lensEmailO := LO.FromIso[*Person](IO.FromZero[string]())(lensEmail)
lensPhoneO := LO.FromIso[*Person](IO.FromZero[*string]())(lensPhone)
return PersonRefLenses{
Name: L.MakeLensRef(
func(s *Person) string { return s.Name },
func(s *Person, v string) *Person { s.Name = v; return s },
),
Age: L.MakeLensRef(
func(s *Person) int { return s.Age },
func(s *Person, v int) *Person { s.Age = v; return s },
),
Email: L.MakeLensRef(
func(s *Person) string { return s.Email },
func(s *Person, v string) *Person { s.Email = v; return s },
),
Phone: L.MakeLensRef(
func(s *Person) O.Option[*string] { return isoPhone.Get(s.Phone) },
func(s *Person, v O.Option[*string]) *Person { s.Phone = isoPhone.ReverseGet(v); return s },
),
// mandatory lenses
Name: lensName,
Age: lensAge,
Email: lensEmail,
Phone: lensPhone,
// optional lenses
NameO: lensNameO,
AgeO: lensAgeO,
EmailO: lensEmailO,
PhoneO: lensPhoneO,
}
}
// AddressLenses provides lenses for accessing fields of Address
type AddressLenses struct {
// mandatory fields
Street L.Lens[Address, string]
City L.Lens[Address, string]
ZipCode L.Lens[Address, string]
Country L.Lens[Address, string]
State LO.LensO[Address, *string]
State L.Lens[Address, *string]
// optional fields
StreetO LO.LensO[Address, string]
CityO LO.LensO[Address, string]
ZipCodeO LO.LensO[Address, string]
CountryO LO.LensO[Address, string]
StateO LO.LensO[Address, *string]
}
// AddressRefLenses provides lenses for accessing fields of Address via a reference to Address
type AddressRefLenses struct {
// mandatory fields
Street L.Lens[*Address, string]
City L.Lens[*Address, string]
ZipCode L.Lens[*Address, string]
Country L.Lens[*Address, string]
State LO.LensO[*Address, *string]
State L.Lens[*Address, *string]
// optional fields
StreetO LO.LensO[*Address, string]
CityO LO.LensO[*Address, string]
ZipCodeO LO.LensO[*Address, string]
CountryO LO.LensO[*Address, string]
StateO LO.LensO[*Address, *string]
}
// MakeAddressLenses creates a new AddressLenses with lenses for all fields
func MakeAddressLenses() AddressLenses {
isoState := I.FromZero[*string]()
// mandatory lenses
lensStreet := L.MakeLens(
func(s Address) string { return s.Street },
func(s Address, v string) Address { s.Street = v; return s },
)
lensCity := L.MakeLens(
func(s Address) string { return s.City },
func(s Address, v string) Address { s.City = v; return s },
)
lensZipCode := L.MakeLens(
func(s Address) string { return s.ZipCode },
func(s Address, v string) Address { s.ZipCode = v; return s },
)
lensCountry := L.MakeLens(
func(s Address) string { return s.Country },
func(s Address, v string) Address { s.Country = v; return s },
)
lensState := L.MakeLens(
func(s Address) *string { return s.State },
func(s Address, v *string) Address { s.State = v; return s },
)
// optional lenses
lensStreetO := LO.FromIso[Address](IO.FromZero[string]())(lensStreet)
lensCityO := LO.FromIso[Address](IO.FromZero[string]())(lensCity)
lensZipCodeO := LO.FromIso[Address](IO.FromZero[string]())(lensZipCode)
lensCountryO := LO.FromIso[Address](IO.FromZero[string]())(lensCountry)
lensStateO := LO.FromIso[Address](IO.FromZero[*string]())(lensState)
return AddressLenses{
Street: L.MakeLens(
func(s Address) string { return s.Street },
func(s Address, v string) Address { s.Street = v; return s },
),
City: L.MakeLens(
func(s Address) string { return s.City },
func(s Address, v string) Address { s.City = v; return s },
),
ZipCode: L.MakeLens(
func(s Address) string { return s.ZipCode },
func(s Address, v string) Address { s.ZipCode = v; return s },
),
Country: L.MakeLens(
func(s Address) string { return s.Country },
func(s Address, v string) Address { s.Country = v; return s },
),
State: L.MakeLens(
func(s Address) O.Option[*string] { return isoState.Get(s.State) },
func(s Address, v O.Option[*string]) Address { s.State = isoState.ReverseGet(v); return s },
),
// mandatory lenses
Street: lensStreet,
City: lensCity,
ZipCode: lensZipCode,
Country: lensCountry,
State: lensState,
// optional lenses
StreetO: lensStreetO,
CityO: lensCityO,
ZipCodeO: lensZipCodeO,
CountryO: lensCountryO,
StateO: lensStateO,
}
}
// MakeAddressRefLenses creates a new AddressRefLenses with lenses for all fields
func MakeAddressRefLenses() AddressRefLenses {
isoState := I.FromZero[*string]()
// mandatory lenses
lensStreet := L.MakeLensStrict(
func(s *Address) string { return s.Street },
func(s *Address, v string) *Address { s.Street = v; return s },
)
lensCity := L.MakeLensStrict(
func(s *Address) string { return s.City },
func(s *Address, v string) *Address { s.City = v; return s },
)
lensZipCode := L.MakeLensStrict(
func(s *Address) string { return s.ZipCode },
func(s *Address, v string) *Address { s.ZipCode = v; return s },
)
lensCountry := L.MakeLensStrict(
func(s *Address) string { return s.Country },
func(s *Address, v string) *Address { s.Country = v; return s },
)
lensState := L.MakeLensStrict(
func(s *Address) *string { return s.State },
func(s *Address, v *string) *Address { s.State = v; return s },
)
// optional lenses
lensStreetO := LO.FromIso[*Address](IO.FromZero[string]())(lensStreet)
lensCityO := LO.FromIso[*Address](IO.FromZero[string]())(lensCity)
lensZipCodeO := LO.FromIso[*Address](IO.FromZero[string]())(lensZipCode)
lensCountryO := LO.FromIso[*Address](IO.FromZero[string]())(lensCountry)
lensStateO := LO.FromIso[*Address](IO.FromZero[*string]())(lensState)
return AddressRefLenses{
Street: L.MakeLensRef(
func(s *Address) string { return s.Street },
func(s *Address, v string) *Address { s.Street = v; return s },
),
City: L.MakeLensRef(
func(s *Address) string { return s.City },
func(s *Address, v string) *Address { s.City = v; return s },
),
ZipCode: L.MakeLensRef(
func(s *Address) string { return s.ZipCode },
func(s *Address, v string) *Address { s.ZipCode = v; return s },
),
Country: L.MakeLensRef(
func(s *Address) string { return s.Country },
func(s *Address, v string) *Address { s.Country = v; return s },
),
State: L.MakeLensRef(
func(s *Address) O.Option[*string] { return isoState.Get(s.State) },
func(s *Address, v O.Option[*string]) *Address { s.State = isoState.ReverseGet(v); return s },
),
// mandatory lenses
Street: lensStreet,
City: lensCity,
ZipCode: lensZipCode,
Country: lensCountry,
State: lensState,
// optional lenses
StreetO: lensStreetO,
CityO: lensCityO,
ZipCodeO: lensZipCodeO,
CountryO: lensCountryO,
StateO: lensStateO,
}
}
// CompanyLenses provides lenses for accessing fields of Company
type CompanyLenses struct {
// mandatory fields
Name L.Lens[Company, string]
Address L.Lens[Company, Address]
CEO L.Lens[Company, Person]
Website LO.LensO[Company, *string]
Website L.Lens[Company, *string]
// optional fields
NameO LO.LensO[Company, string]
AddressO LO.LensO[Company, Address]
CEOO LO.LensO[Company, Person]
WebsiteO LO.LensO[Company, *string]
}
// CompanyRefLenses provides lenses for accessing fields of Company via a reference to Company
type CompanyRefLenses struct {
// mandatory fields
Name L.Lens[*Company, string]
Address L.Lens[*Company, Address]
CEO L.Lens[*Company, Person]
Website LO.LensO[*Company, *string]
Website L.Lens[*Company, *string]
// optional fields
NameO LO.LensO[*Company, string]
AddressO LO.LensO[*Company, Address]
CEOO LO.LensO[*Company, Person]
WebsiteO LO.LensO[*Company, *string]
}
// MakeCompanyLenses creates a new CompanyLenses with lenses for all fields
func MakeCompanyLenses() CompanyLenses {
isoWebsite := I.FromZero[*string]()
// mandatory lenses
lensName := L.MakeLens(
func(s Company) string { return s.Name },
func(s Company, v string) Company { s.Name = v; return s },
)
lensAddress := L.MakeLens(
func(s Company) Address { return s.Address },
func(s Company, v Address) Company { s.Address = v; return s },
)
lensCEO := L.MakeLens(
func(s Company) Person { return s.CEO },
func(s Company, v Person) Company { s.CEO = v; return s },
)
lensWebsite := L.MakeLens(
func(s Company) *string { return s.Website },
func(s Company, v *string) Company { s.Website = v; return s },
)
// optional lenses
lensNameO := LO.FromIso[Company](IO.FromZero[string]())(lensName)
lensAddressO := LO.FromIso[Company](IO.FromZero[Address]())(lensAddress)
lensCEOO := LO.FromIso[Company](IO.FromZero[Person]())(lensCEO)
lensWebsiteO := LO.FromIso[Company](IO.FromZero[*string]())(lensWebsite)
return CompanyLenses{
Name: L.MakeLens(
func(s Company) string { return s.Name },
func(s Company, v string) Company { s.Name = v; return s },
),
Address: L.MakeLens(
func(s Company) Address { return s.Address },
func(s Company, v Address) Company { s.Address = v; return s },
),
CEO: L.MakeLens(
func(s Company) Person { return s.CEO },
func(s Company, v Person) Company { s.CEO = v; return s },
),
Website: L.MakeLens(
func(s Company) O.Option[*string] { return isoWebsite.Get(s.Website) },
func(s Company, v O.Option[*string]) Company { s.Website = isoWebsite.ReverseGet(v); return s },
),
// mandatory lenses
Name: lensName,
Address: lensAddress,
CEO: lensCEO,
Website: lensWebsite,
// optional lenses
NameO: lensNameO,
AddressO: lensAddressO,
CEOO: lensCEOO,
WebsiteO: lensWebsiteO,
}
}
// MakeCompanyRefLenses creates a new CompanyRefLenses with lenses for all fields
func MakeCompanyRefLenses() CompanyRefLenses {
isoWebsite := I.FromZero[*string]()
// mandatory lenses
lensName := L.MakeLensStrict(
func(s *Company) string { return s.Name },
func(s *Company, v string) *Company { s.Name = v; return s },
)
lensAddress := L.MakeLensStrict(
func(s *Company) Address { return s.Address },
func(s *Company, v Address) *Company { s.Address = v; return s },
)
lensCEO := L.MakeLensStrict(
func(s *Company) Person { return s.CEO },
func(s *Company, v Person) *Company { s.CEO = v; return s },
)
lensWebsite := L.MakeLensStrict(
func(s *Company) *string { return s.Website },
func(s *Company, v *string) *Company { s.Website = v; return s },
)
// optional lenses
lensNameO := LO.FromIso[*Company](IO.FromZero[string]())(lensName)
lensAddressO := LO.FromIso[*Company](IO.FromZero[Address]())(lensAddress)
lensCEOO := LO.FromIso[*Company](IO.FromZero[Person]())(lensCEO)
lensWebsiteO := LO.FromIso[*Company](IO.FromZero[*string]())(lensWebsite)
return CompanyRefLenses{
Name: L.MakeLensRef(
func(s *Company) string { return s.Name },
func(s *Company, v string) *Company { s.Name = v; return s },
),
Address: L.MakeLensRef(
func(s *Company) Address { return s.Address },
func(s *Company, v Address) *Company { s.Address = v; return s },
),
CEO: L.MakeLensRef(
func(s *Company) Person { return s.CEO },
func(s *Company, v Person) *Company { s.CEO = v; return s },
),
Website: L.MakeLensRef(
func(s *Company) O.Option[*string] { return isoWebsite.Get(s.Website) },
func(s *Company, v O.Option[*string]) *Company { s.Website = isoWebsite.ReverseGet(v); return s },
),
// mandatory lenses
Name: lensName,
Address: lensAddress,
CEO: lensCEO,
Website: lensWebsite,
// optional lenses
NameO: lensNameO,
AddressO: lensAddressO,
CEOO: lensCEOO,
WebsiteO: lensWebsiteO,
}
}
// CompanyExtendedLenses provides lenses for accessing fields of CompanyExtended
type CompanyExtendedLenses struct {
// mandatory fields
Name L.Lens[CompanyExtended, string]
Address L.Lens[CompanyExtended, Address]
CEO L.Lens[CompanyExtended, Person]
Website L.Lens[CompanyExtended, *string]
Extended L.Lens[CompanyExtended, string]
// optional fields
NameO LO.LensO[CompanyExtended, string]
AddressO LO.LensO[CompanyExtended, Address]
CEOO LO.LensO[CompanyExtended, Person]
WebsiteO LO.LensO[CompanyExtended, *string]
ExtendedO LO.LensO[CompanyExtended, string]
}
// CompanyExtendedRefLenses provides lenses for accessing fields of CompanyExtended via a reference to CompanyExtended
type CompanyExtendedRefLenses struct {
// mandatory fields
Name L.Lens[*CompanyExtended, string]
Address L.Lens[*CompanyExtended, Address]
CEO L.Lens[*CompanyExtended, Person]
Website L.Lens[*CompanyExtended, *string]
Extended L.Lens[*CompanyExtended, string]
// optional fields
NameO LO.LensO[*CompanyExtended, string]
AddressO LO.LensO[*CompanyExtended, Address]
CEOO LO.LensO[*CompanyExtended, Person]
WebsiteO LO.LensO[*CompanyExtended, *string]
ExtendedO LO.LensO[*CompanyExtended, string]
}
// MakeCompanyExtendedLenses creates a new CompanyExtendedLenses with lenses for all fields
func MakeCompanyExtendedLenses() CompanyExtendedLenses {
// mandatory lenses
lensName := L.MakeLens(
func(s CompanyExtended) string { return s.Name },
func(s CompanyExtended, v string) CompanyExtended { s.Name = v; return s },
)
lensAddress := L.MakeLens(
func(s CompanyExtended) Address { return s.Address },
func(s CompanyExtended, v Address) CompanyExtended { s.Address = v; return s },
)
lensCEO := L.MakeLens(
func(s CompanyExtended) Person { return s.CEO },
func(s CompanyExtended, v Person) CompanyExtended { s.CEO = v; return s },
)
lensWebsite := L.MakeLens(
func(s CompanyExtended) *string { return s.Website },
func(s CompanyExtended, v *string) CompanyExtended { s.Website = v; return s },
)
lensExtended := L.MakeLens(
func(s CompanyExtended) string { return s.Extended },
func(s CompanyExtended, v string) CompanyExtended { s.Extended = v; return s },
)
// optional lenses
lensNameO := LO.FromIso[CompanyExtended](IO.FromZero[string]())(lensName)
lensAddressO := LO.FromIso[CompanyExtended](IO.FromZero[Address]())(lensAddress)
lensCEOO := LO.FromIso[CompanyExtended](IO.FromZero[Person]())(lensCEO)
lensWebsiteO := LO.FromIso[CompanyExtended](IO.FromZero[*string]())(lensWebsite)
lensExtendedO := LO.FromIso[CompanyExtended](IO.FromZero[string]())(lensExtended)
return CompanyExtendedLenses{
// mandatory lenses
Name: lensName,
Address: lensAddress,
CEO: lensCEO,
Website: lensWebsite,
Extended: lensExtended,
// optional lenses
NameO: lensNameO,
AddressO: lensAddressO,
CEOO: lensCEOO,
WebsiteO: lensWebsiteO,
ExtendedO: lensExtendedO,
}
}
// MakeCompanyExtendedRefLenses creates a new CompanyExtendedRefLenses with lenses for all fields
func MakeCompanyExtendedRefLenses() CompanyExtendedRefLenses {
// mandatory lenses
lensName := L.MakeLensStrict(
func(s *CompanyExtended) string { return s.Name },
func(s *CompanyExtended, v string) *CompanyExtended { s.Name = v; return s },
)
lensAddress := L.MakeLensStrict(
func(s *CompanyExtended) Address { return s.Address },
func(s *CompanyExtended, v Address) *CompanyExtended { s.Address = v; return s },
)
lensCEO := L.MakeLensStrict(
func(s *CompanyExtended) Person { return s.CEO },
func(s *CompanyExtended, v Person) *CompanyExtended { s.CEO = v; return s },
)
lensWebsite := L.MakeLensStrict(
func(s *CompanyExtended) *string { return s.Website },
func(s *CompanyExtended, v *string) *CompanyExtended { s.Website = v; return s },
)
lensExtended := L.MakeLensStrict(
func(s *CompanyExtended) string { return s.Extended },
func(s *CompanyExtended, v string) *CompanyExtended { s.Extended = v; return s },
)
// optional lenses
lensNameO := LO.FromIso[*CompanyExtended](IO.FromZero[string]())(lensName)
lensAddressO := LO.FromIso[*CompanyExtended](IO.FromZero[Address]())(lensAddress)
lensCEOO := LO.FromIso[*CompanyExtended](IO.FromZero[Person]())(lensCEO)
lensWebsiteO := LO.FromIso[*CompanyExtended](IO.FromZero[*string]())(lensWebsite)
lensExtendedO := LO.FromIso[*CompanyExtended](IO.FromZero[string]())(lensExtended)
return CompanyExtendedRefLenses{
// mandatory lenses
Name: lensName,
Address: lensAddress,
CEO: lensCEO,
Website: lensWebsite,
Extended: lensExtended,
// optional lenses
NameO: lensNameO,
AddressO: lensAddressO,
CEOO: lensCEOO,
WebsiteO: lensWebsiteO,
ExtendedO: lensExtendedO,
}
}
// CheckOptionLenses provides lenses for accessing fields of CheckOption
type CheckOptionLenses struct {
// mandatory fields
Name L.Lens[CheckOption, option.Option[string]]
Value LO.LensO[CheckOption, string]
Value L.Lens[CheckOption, string]
// optional fields
ValueO LO.LensO[CheckOption, string]
}
// CheckOptionRefLenses provides lenses for accessing fields of CheckOption via a reference to CheckOption
type CheckOptionRefLenses struct {
// mandatory fields
Name L.Lens[*CheckOption, option.Option[string]]
Value LO.LensO[*CheckOption, string]
Value L.Lens[*CheckOption, string]
// optional fields
ValueO LO.LensO[*CheckOption, string]
}
// MakeCheckOptionLenses creates a new CheckOptionLenses with lenses for all fields
func MakeCheckOptionLenses() CheckOptionLenses {
isoValue := I.FromZero[string]()
// mandatory lenses
lensName := L.MakeLens(
func(s CheckOption) option.Option[string] { return s.Name },
func(s CheckOption, v option.Option[string]) CheckOption { s.Name = v; return s },
)
lensValue := L.MakeLens(
func(s CheckOption) string { return s.Value },
func(s CheckOption, v string) CheckOption { s.Value = v; return s },
)
// optional lenses
lensValueO := LO.FromIso[CheckOption](IO.FromZero[string]())(lensValue)
return CheckOptionLenses{
Name: L.MakeLens(
func(s CheckOption) option.Option[string] { return s.Name },
func(s CheckOption, v option.Option[string]) CheckOption { s.Name = v; return s },
),
Value: L.MakeLens(
func(s CheckOption) O.Option[string] { return isoValue.Get(s.Value) },
func(s CheckOption, v O.Option[string]) CheckOption { s.Value = isoValue.ReverseGet(v); return s },
),
// mandatory lenses
Name: lensName,
Value: lensValue,
// optional lenses
ValueO: lensValueO,
}
}
// MakeCheckOptionRefLenses creates a new CheckOptionRefLenses with lenses for all fields
func MakeCheckOptionRefLenses() CheckOptionRefLenses {
isoValue := I.FromZero[string]()
// mandatory lenses
lensName := L.MakeLensRef(
func(s *CheckOption) option.Option[string] { return s.Name },
func(s *CheckOption, v option.Option[string]) *CheckOption { s.Name = v; return s },
)
lensValue := L.MakeLensStrict(
func(s *CheckOption) string { return s.Value },
func(s *CheckOption, v string) *CheckOption { s.Value = v; return s },
)
// optional lenses
lensValueO := LO.FromIso[*CheckOption](IO.FromZero[string]())(lensValue)
return CheckOptionRefLenses{
Name: L.MakeLensRef(
func(s *CheckOption) option.Option[string] { return s.Name },
func(s *CheckOption, v option.Option[string]) *CheckOption { s.Name = v; return s },
),
Value: L.MakeLensRef(
func(s *CheckOption) O.Option[string] { return isoValue.Get(s.Value) },
func(s *CheckOption, v O.Option[string]) *CheckOption { s.Value = isoValue.ReverseGet(v); return s },
),
// mandatory lenses
Name: lensName,
Value: lensValue,
// optional lenses
ValueO: lensValueO,
}
}
// WithGenericLenses provides lenses for accessing fields of WithGeneric
type WithGenericLenses[T any] struct {
// mandatory fields
Name L.Lens[WithGeneric[T], string]
Value L.Lens[WithGeneric[T], T]
// optional fields
NameO LO.LensO[WithGeneric[T], string]
}
// WithGenericRefLenses provides lenses for accessing fields of WithGeneric via a reference to WithGeneric
type WithGenericRefLenses[T any] struct {
// mandatory fields
Name L.Lens[*WithGeneric[T], string]
Value L.Lens[*WithGeneric[T], T]
// optional fields
NameO LO.LensO[*WithGeneric[T], string]
}
// MakeWithGenericLenses creates a new WithGenericLenses with lenses for all fields
func MakeWithGenericLenses[T any]() WithGenericLenses[T] {
// mandatory lenses
lensName := L.MakeLens(
func(s WithGeneric[T]) string { return s.Name },
func(s WithGeneric[T], v string) WithGeneric[T] { s.Name = v; return s },
)
lensValue := L.MakeLens(
func(s WithGeneric[T]) T { return s.Value },
func(s WithGeneric[T], v T) WithGeneric[T] { s.Value = v; return s },
)
// optional lenses
lensNameO := LO.FromIso[WithGeneric[T]](IO.FromZero[string]())(lensName)
return WithGenericLenses[T]{
// mandatory lenses
Name: lensName,
Value: lensValue,
// optional lenses
NameO: lensNameO,
}
}
// MakeWithGenericRefLenses creates a new WithGenericRefLenses with lenses for all fields
func MakeWithGenericRefLenses[T any]() WithGenericRefLenses[T] {
// mandatory lenses
lensName := L.MakeLensStrict(
func(s *WithGeneric[T]) string { return s.Name },
func(s *WithGeneric[T], v string) *WithGeneric[T] { s.Name = v; return s },
)
lensValue := L.MakeLensRef(
func(s *WithGeneric[T]) T { return s.Value },
func(s *WithGeneric[T], v T) *WithGeneric[T] { s.Value = v; return s },
)
// optional lenses
lensNameO := LO.FromIso[*WithGeneric[T]](IO.FromZero[string]())(lensName)
return WithGenericRefLenses[T]{
// mandatory lenses
Name: lensName,
Value: lensValue,
// optional lenses
NameO: lensNameO,
}
}
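A brief, hypothetical usage sketch of the generated generic lenses (the values below are illustrative; Get and Set are the standard lens accessors, used the same way as in the tests earlier in this changeset):
lenses := MakeWithGenericLenses[int]()
wg := WithGeneric[int]{Name: "answer", Value: 41}
updated := lenses.Value.Set(42)(wg) // copy of wg with Value == 42
name := lenses.Name.Get(wg) // "answer"; wg itself is unchanged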