1
0
mirror of https://github.com/IBM/fp-go.git synced 2025-12-07 23:03:15 +02:00

Compare commits

...

3 Commits

Author SHA1 Message Date
Dr. Carsten Leue
d2dbce6e8b fix: improve lens handling
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-12 18:23:57 +01:00
Dr. Carsten Leue
6f7ec0768d fix: improve lens generation
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-12 17:28:20 +01:00
Dr. Carsten Leue
ca813b673c fix: better tests and doc
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2025-11-12 16:24:12 +01:00
26 changed files with 3696 additions and 569 deletions

View File

@@ -15,14 +15,163 @@
package bytes
// Empty returns the identity element of the byte-slice Monoid: an
// empty []byte.
//
// It is a convenient neutral starting value when accumulating byte
// slices, and satisfies the Monoid identity laws:
//
//	Monoid.Concat(Empty(), x) == x
//	Monoid.Concat(x, Empty()) == x
//
// See also:
//   - Monoid.Empty: the underlying identity operation
//   - ConcatAll: for concatenating multiple byte slices
func Empty() []byte {
	return Monoid.Empty()
}
// ToString converts a byte slice into a string.
//
// The conversion copies the underlying bytes, so the returned string is
// independent of any later mutation of a. Because it allocates, code on
// a hot path may prefer to keep working with []byte directly.
//
// Parameters:
//   - a: the byte slice to convert
//
// Returns:
//   - a string containing exactly the bytes of a (empty input yields "")
//
// See also:
//   - Size: for the length of a byte slice
func ToString(a []byte) string {
	return string(a)
}
// Size reports the number of bytes contained in as.
//
// It is equivalent to the built-in len(as) and exists so that the
// length can be used as a first-class function in compositions such as
// array.Map(Size). For UTF-8 encoded text the result is a byte count,
// not a rune (character) count.
//
// Parameters:
//   - as: the byte slice to measure
//
// Returns:
//   - the number of bytes in as (0 for a nil or empty slice)
//
// See also:
//   - ToString: for converting a byte slice to a string
func Size(as []byte) int {
	return len(as)
}

View File

@@ -187,6 +187,299 @@ func TestOrd(t *testing.T) {
})
}
// TestOrdProperties tests mathematical properties of Ord
// (reflexivity, antisymmetry, transitivity, totality — the laws a
// total order must satisfy).
func TestOrdProperties(t *testing.T) {
	t.Run("reflexivity: x == x", func(t *testing.T) {
		testCases := [][]byte{
			[]byte{},
			[]byte("a"),
			[]byte("test"),
			[]byte{0x01, 0x02, 0x03},
		}
		for _, tc := range testCases {
			// A value must always compare equal to itself.
			assert.Equal(t, 0, Ord.Compare(tc, tc),
				"Compare(%v, %v) should be 0", tc, tc)
			assert.True(t, Ord.Equals(tc, tc),
				"Equals(%v, %v) should be true", tc, tc)
		}
	})
	t.Run("antisymmetry: if x <= y and y <= x then x == y", func(t *testing.T) {
		testCases := []struct {
			a, b []byte
		}{
			{[]byte("abc"), []byte("abc")},
			{[]byte{}, []byte{}},
			{[]byte{0x01}, []byte{0x01}},
		}
		for _, tc := range testCases {
			cmp1 := Ord.Compare(tc.a, tc.b)
			cmp2 := Ord.Compare(tc.b, tc.a)
			if cmp1 <= 0 && cmp2 <= 0 {
				assert.True(t, Ord.Equals(tc.a, tc.b),
					"If %v <= %v and %v <= %v, they should be equal", tc.a, tc.b, tc.b, tc.a)
			}
		}
	})
	t.Run("transitivity: if x <= y and y <= z then x <= z", func(t *testing.T) {
		x := []byte("a")
		y := []byte("b")
		z := []byte("c")
		cmpXY := Ord.Compare(x, y)
		cmpYZ := Ord.Compare(y, z)
		cmpXZ := Ord.Compare(x, z)
		if cmpXY <= 0 && cmpYZ <= 0 {
			assert.True(t, cmpXZ <= 0,
				"If %v <= %v and %v <= %v, then %v <= %v", x, y, y, z, x, z)
		}
	})
	t.Run("totality: either x <= y or y <= x", func(t *testing.T) {
		testCases := []struct {
			a, b []byte
		}{
			{[]byte("abc"), []byte("abd")},
			{[]byte("xyz"), []byte("abc")},
			{[]byte{}, []byte("a")},
			{[]byte{0x01}, []byte{0x02}},
		}
		for _, tc := range testCases {
			cmp1 := Ord.Compare(tc.a, tc.b)
			cmp2 := Ord.Compare(tc.b, tc.a)
			// For a total order, at least one direction must hold.
			assert.True(t, cmp1 <= 0 || cmp2 <= 0,
				"Either %v <= %v or %v <= %v must be true", tc.a, tc.b, tc.b, tc.a)
		}
	})
}
// TestEdgeCases tests edge cases and boundary conditions
// (large inputs, many slices, and embedded NUL bytes).
func TestEdgeCases(t *testing.T) {
	t.Run("very large byte slices", func(t *testing.T) {
		const n = 1000000
		big := make([]byte, n)
		for i := range big {
			big[i] = byte(i % 256)
		}
		assert.Equal(t, n, Size(big))
		assert.Equal(t, n, len(ToString(big)))
	})
	t.Run("concatenating many slices", func(t *testing.T) {
		parts := make([][]byte, 100)
		for i := range parts {
			parts[i] = []byte{byte(i)}
		}
		assert.Equal(t, 100, Size(ConcatAll(parts...)))
	})
	t.Run("null bytes in slice", func(t *testing.T) {
		// NUL bytes are ordinary data for both Size and ToString.
		withNuls := []byte{0x00, 0x01, 0x00, 0x02}
		assert.Equal(t, 4, Size(withNuls))
		assert.Equal(t, 4, len(ToString(withNuls)))
	})
	t.Run("comparing slices with null bytes", func(t *testing.T) {
		lower := []byte{0x00, 0x01}
		higher := []byte{0x00, 0x02}
		assert.Equal(t, -1, Ord.Compare(lower, higher))
	})
}
// TestMonoidConcatPerformance tests concatenation performance characteristics
// by checking that ConcatAll agrees with a left fold over Monoid.Concat.
func TestMonoidConcatPerformance(t *testing.T) {
	t.Run("ConcatAll vs repeated Concat", func(t *testing.T) {
		parts := [][]byte{
			[]byte("a"),
			[]byte("b"),
			[]byte("c"),
			[]byte("d"),
			[]byte("e"),
		}

		// Single-shot concatenation.
		viaConcatAll := ConcatAll(parts...)

		// Equivalent left fold starting from the identity element.
		folded := Monoid.Empty()
		for _, part := range parts {
			folded = Monoid.Concat(folded, part)
		}

		assert.Equal(t, viaConcatAll, folded)
		assert.Equal(t, []byte("abcde"), viaConcatAll)
	})
}
// TestRoundTrip tests round-trip conversions between string and []byte,
// which must be lossless in both directions.
func TestRoundTrip(t *testing.T) {
	t.Run("string to bytes to string", func(t *testing.T) {
		input := "Hello, World! 世界"
		assert.Equal(t, input, ToString([]byte(input)))
	})
	t.Run("bytes to string to bytes", func(t *testing.T) {
		input := []byte{0x48, 0x65, 0x6c, 0x6c, 0x6f}
		assert.Equal(t, input, []byte(ToString(input)))
	})
}
// TestConcatAllVariadic tests ConcatAll with various argument counts,
// from the degenerate zero-argument case up to ten slices.
func TestConcatAllVariadic(t *testing.T) {
	t.Run("zero arguments", func(t *testing.T) {
		// With no inputs, ConcatAll yields the Monoid identity.
		result := ConcatAll()
		assert.Equal(t, []byte{}, result)
	})
	t.Run("one argument", func(t *testing.T) {
		result := ConcatAll([]byte("test"))
		assert.Equal(t, []byte("test"), result)
	})
	t.Run("two arguments", func(t *testing.T) {
		result := ConcatAll([]byte("hello"), []byte("world"))
		assert.Equal(t, []byte("helloworld"), result)
	})
	t.Run("many arguments", func(t *testing.T) {
		result := ConcatAll(
			[]byte("a"),
			[]byte("b"),
			[]byte("c"),
			[]byte("d"),
			[]byte("e"),
			[]byte("f"),
			[]byte("g"),
			[]byte("h"),
			[]byte("i"),
			[]byte("j"),
		)
		assert.Equal(t, []byte("abcdefghij"), result)
	})
}
// Benchmark tests

// BenchmarkToString measures []byte-to-string conversion cost for
// small and large inputs.
func BenchmarkToString(b *testing.B) {
	data := []byte("Hello, World!")
	b.Run("small", func(b *testing.B) {
		for i := 0; i < b.N; i++ {
			_ = ToString(data)
		}
	})
	b.Run("large", func(b *testing.B) {
		large := make([]byte, 10000)
		for i := range large {
			large[i] = byte(i % 256)
		}
		// Exclude fixture setup from the measurement.
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_ = ToString(large)
		}
	})
}

// BenchmarkSize measures the cost of the Size wrapper around len.
func BenchmarkSize(b *testing.B) {
	data := []byte("Hello, World!")
	for i := 0; i < b.N; i++ {
		_ = Size(data)
	}
}

// BenchmarkMonoidConcat measures pairwise concatenation via the Monoid
// instance for small and large operands.
func BenchmarkMonoidConcat(b *testing.B) {
	a := []byte("Hello")
	c := []byte(" World")
	b.Run("small slices", func(b *testing.B) {
		for i := 0; i < b.N; i++ {
			_ = Monoid.Concat(a, c)
		}
	})
	b.Run("large slices", func(b *testing.B) {
		large1 := make([]byte, 10000)
		large2 := make([]byte, 10000)
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_ = Monoid.Concat(large1, large2)
		}
	})
}

// BenchmarkConcatAll measures variadic concatenation for a few and for
// many input slices.
func BenchmarkConcatAll(b *testing.B) {
	slices := [][]byte{
		[]byte("Hello"),
		[]byte(" "),
		[]byte("World"),
		[]byte("!"),
	}
	b.Run("few slices", func(b *testing.B) {
		for i := 0; i < b.N; i++ {
			_ = ConcatAll(slices...)
		}
	})
	b.Run("many slices", func(b *testing.B) {
		many := make([][]byte, 100)
		for i := range many {
			many[i] = []byte{byte(i)}
		}
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_ = ConcatAll(many...)
		}
	})
}

// BenchmarkOrdCompare measures lexicographic comparison for equal,
// differing, and large near-equal inputs.
func BenchmarkOrdCompare(b *testing.B) {
	a := []byte("abc")
	c := []byte("abd")
	b.Run("equal", func(b *testing.B) {
		for i := 0; i < b.N; i++ {
			_ = Ord.Compare(a, a)
		}
	})
	b.Run("different", func(b *testing.B) {
		for i := 0; i < b.N; i++ {
			_ = Ord.Compare(a, c)
		}
	})
	b.Run("large slices", func(b *testing.B) {
		// Worst case: the slices differ only in the final byte, so the
		// comparison must scan the whole length.
		large1 := make([]byte, 10000)
		large2 := make([]byte, 10000)
		large2[9999] = 1
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_ = Ord.Compare(large1, large2)
		}
	})
}
// Example tests
func ExampleEmpty() {
empty := Empty()
@@ -219,3 +512,17 @@ func ExampleConcatAll() {
// Output:
}
// ExampleMonoid_concat demonstrates concatenation via the Monoid instance.
// NOTE(review): println writes to stderr, so the empty "// Output:" line
// still passes; using fmt.Println with a real expected output would make
// the example self-verifying — confirm intent.
func ExampleMonoid_concat() {
	result := Monoid.Concat([]byte("Hello"), []byte(" World"))
	println(string(result)) // Hello World
	// Output:
}

// ExampleOrd_compare demonstrates lexicographic comparison of byte slices.
// NOTE(review): same println/stderr caveat as in ExampleMonoid_concat.
func ExampleOrd_compare() {
	cmp := Ord.Compare([]byte("abc"), []byte("abd"))
	println(cmp) // -1 (abc < abd)
	// Output:
}

4
v2/bytes/coverage.out Normal file
View File

@@ -0,0 +1,4 @@
mode: set
github.com/IBM/fp-go/v2/bytes/bytes.go:55.21,57.2 1 1
github.com/IBM/fp-go/v2/bytes/bytes.go:111.32,113.2 1 1
github.com/IBM/fp-go/v2/bytes/bytes.go:175.26,177.2 1 1

View File

@@ -23,12 +23,219 @@ import (
)
var (
	// Monoid is the Monoid instance for byte slices.
	//
	// It combines byte slices through concatenation, with the empty
	// byte slice as the identity element, satisfying the monoid laws:
	//
	//	Monoid.Concat(Monoid.Empty(), x) == x                (left identity)
	//	Monoid.Concat(x, Monoid.Empty()) == x                (right identity)
	//	Monoid.Concat(Monoid.Concat(a, b), c) ==
	//	    Monoid.Concat(a, Monoid.Concat(b, c))            (associativity)
	//
	// Operations:
	//   - Empty(): returns an empty byte slice []byte{}
	//   - Concat(a, b []byte): concatenates two byte slices
	//
	// Example:
	//
	//	result := Monoid.Concat([]byte("Hello"), []byte(" World"))
	//	// result: []byte("Hello World")
	//
	// See also:
	//   - ConcatAll: for concatenating multiple byte slices at once
	//   - Empty(): convenience function for the identity element
	Monoid = A.Monoid[byte]()

	// ConcatAll efficiently concatenates multiple byte slices into a
	// single slice, preserving their order.
	//
	// Parameters:
	//   - slices: zero or more byte slices to concatenate
	//
	// Returns:
	//   - a new byte slice containing all inputs concatenated in order
	//     (an empty slice when called with no arguments)
	//
	// Performance: more efficient than repeated Monoid.Concat because
	// the total size is computed up front and memory is allocated once.
	//
	// Example:
	//
	//	result := ConcatAll(
	//	    []byte("Hello"),
	//	    []byte(" "),
	//	    []byte("World"),
	//	)
	//	// result: []byte("Hello World")
	//
	// Empty slices among the arguments contribute nothing:
	//
	//	ConcatAll([]byte("a"), []byte{}, []byte("b")) // []byte("ab")
	//
	// See also:
	//   - Monoid.Concat: for concatenating exactly two byte slices
	//   - bytes.Join: standard library function for joining with a separator
	ConcatAll = A.ArrayConcatAll[byte]

	// Ord is the Ord instance for byte slices providing lexicographic
	// ordering, backed by the standard library's bytes.Compare and
	// bytes.Equal.
	//
	// Comparison rules:
	//   - bytes are compared left to right; the first difference decides
	//   - a shorter slice is less than a longer one when all common
	//     bytes match; the empty slice is least of all
	//
	// Operations:
	//   - Compare(a, b []byte) int: -1 if a < b, 0 if a == b, 1 if a > b
	//   - Equals(a, b []byte) bool: true when the slices are equal
	//
	// Examples:
	//
	//	Ord.Compare([]byte("abc"), []byte("abd"))  // -1
	//	Ord.Compare([]byte("ab"), []byte("abc"))   // -1 (shorter is less)
	//	Ord.Compare([]byte{}, []byte{})            // 0
	//	Ord.Equals([]byte("test"), []byte("Test")) // false (case-sensitive)
	//
	// See also:
	//   - bytes.Compare, bytes.Equal: the underlying standard library functions
	//   - array.Sort: for sorting slices using an Ord instance
	Ord = O.MakeOrd(bytes.Compare, bytes.Equal)
)

View File

@@ -60,10 +60,11 @@ type structInfo struct {
// fieldInfo holds information about a struct field
type fieldInfo struct {
Name string
TypeName string
BaseType string // TypeName without leading * for pointer types
IsOptional bool // true if field is a pointer or has json omitempty tag
Name string
TypeName string
BaseType string // TypeName without leading * for pointer types
IsOptional bool // true if field is a pointer or has json omitempty tag
IsComparable bool // true if the type is comparable (can use ==)
}
// templateData holds data for template rendering
@@ -115,17 +116,25 @@ func Make{{.Name}}Lenses() {{.Name}}Lenses {
// Make{{.Name}}RefLenses creates a new {{.Name}}RefLenses with lenses for all fields
func Make{{.Name}}RefLenses() {{.Name}}RefLenses {
{{- range .Fields}}
{{- if .IsOptional}}
iso{{.Name}} := I.FromZero[{{.TypeName}}]()
{{- end}}
{{- end}}
return {{.Name}}RefLenses{
{{- range .Fields}}
{{- if .IsOptional}}
{{.Name}}: L.MakeLensRef(
func(s *{{$.Name}}) O.Option[{{.TypeName}}] { return iso{{.Name}}.Get(s.{{.Name}}) },
func(s *{{$.Name}}, v O.Option[{{.TypeName}}]) *{{$.Name}} { s.{{.Name}} = iso{{.Name}}.ReverseGet(v); return s },
{{- if .IsComparable}}
{{.Name}}: LO.FromIso[*{{$.Name}}](I.FromZero[{{.TypeName}}]())(L.MakeLensStrict(
func(s *{{$.Name}}) {{.TypeName}} { return s.{{.Name}} },
func(s *{{$.Name}}, v {{.TypeName}}) *{{$.Name}} { s.{{.Name}} = v; return s },
)),
{{- else}}
{{.Name}}: LO.FromIso[*{{$.Name}}](I.FromZero[{{.TypeName}}]())(L.MakeLensRef(
func(s *{{$.Name}}) {{.TypeName}} { return s.{{.Name}} },
func(s *{{$.Name}}, v {{.TypeName}}) *{{$.Name}} { s.{{.Name}} = v; return s },
)),
{{- end}}
{{- else}}
{{- if .IsComparable}}
{{.Name}}: L.MakeLensStrict(
func(s *{{$.Name}}) {{.TypeName}} { return s.{{.Name}} },
func(s *{{$.Name}}, v {{.TypeName}}) *{{$.Name}} { s.{{.Name}} = v; return s },
),
{{- else}}
{{.Name}}: L.MakeLensRef(
@@ -133,6 +142,7 @@ func Make{{.Name}}RefLenses() {{.Name}}RefLenses {
func(s *{{$.Name}}, v {{.TypeName}}) *{{$.Name}} { s.{{.Name}} = v; return s },
),
{{- end}}
{{- end}}
{{- end}}
}
}
@@ -257,6 +267,111 @@ func isPointerType(expr ast.Expr) bool {
return ok
}
// isComparableType checks if a type expression represents a comparable type.
// Comparable types in Go include:
// - Basic types (bool, numeric types, string)
// - Pointer types
// - Channel types
// - Interface types
// - Structs where all fields are comparable
// - Arrays where the element type is comparable
//
// Non-comparable types include:
// - Slices
// - Maps
// - Functions
func isComparableType(expr ast.Expr) bool {
switch t := expr.(type) {
case *ast.Ident:
// Basic types and named types
// We assume named types are comparable unless they're known non-comparable types
name := t.Name
// Known non-comparable built-in types
if name == "error" {
// error is an interface, which is comparable
return true
}
// Most basic types and named types are comparable
// We can't determine if a custom type is comparable without type checking,
// so we assume it is (conservative approach)
return true
case *ast.StarExpr:
// Pointer types are always comparable
return true
case *ast.ArrayType:
// Arrays are comparable if their element type is comparable
if t.Len == nil {
// This is a slice (no length), slices are not comparable
return false
}
// Fixed-size array, check element type
return isComparableType(t.Elt)
case *ast.MapType:
// Maps are not comparable
return false
case *ast.FuncType:
// Functions are not comparable
return false
case *ast.InterfaceType:
// Interface types are comparable
return true
case *ast.StructType:
// Structs are comparable if all fields are comparable
// We can't easily determine this without full type information,
// so we conservatively return false for struct literals
return false
case *ast.SelectorExpr:
// Qualified identifier (e.g., pkg.Type)
// We can't determine comparability without type information
// Check for known non-comparable types from standard library
if ident, ok := t.X.(*ast.Ident); ok {
pkgName := ident.Name
typeName := t.Sel.Name
// Check for known non-comparable types
if pkgName == "context" && typeName == "Context" {
// context.Context is an interface, which is comparable
return true
}
// For other qualified types, we assume they're comparable
// This is a conservative approach
}
return true
case *ast.IndexExpr, *ast.IndexListExpr:
// Generic types - we can't determine comparability without type information
// For common generic types, we can make educated guesses
var baseExpr ast.Expr
if idx, ok := t.(*ast.IndexExpr); ok {
baseExpr = idx.X
} else if idxList, ok := t.(*ast.IndexListExpr); ok {
baseExpr = idxList.X
}
if sel, ok := baseExpr.(*ast.SelectorExpr); ok {
if ident, ok := sel.X.(*ast.Ident); ok {
pkgName := ident.Name
typeName := sel.Sel.Name
// Check for known non-comparable generic types
if pkgName == "option" && typeName == "Option" {
// Option types are not comparable (they contain a slice internally)
return false
}
if pkgName == "either" && typeName == "Either" {
// Either types are not comparable
return false
}
}
}
// For other generic types, conservatively assume not comparable
return false
case *ast.ChanType:
// Channel types are comparable
return true
default:
// Unknown type, conservatively assume not comparable
return false
}
}
// parseFile parses a Go file and extracts structs with lens annotations
func parseFile(filename string) ([]structInfo, string, error) {
fset := token.NewFileSet()
@@ -331,6 +446,7 @@ func parseFile(filename string) ([]structInfo, string, error) {
typeName := getTypeName(field.Type)
isOptional := false
baseType := typeName
isComparable := false
// Check if field is optional:
// 1. Pointer types are always optional
@@ -344,6 +460,10 @@ func parseFile(filename string) ([]structInfo, string, error) {
isOptional = true
}
// Check if the type is comparable (for non-optional fields)
// For optional fields, we don't need to check since they use LensO
isComparable = isComparableType(field.Type)
// Extract imports from this field's type
fieldImports := make(map[string]string)
extractImports(field.Type, fieldImports)
@@ -356,10 +476,11 @@ func parseFile(filename string) ([]structInfo, string, error) {
}
fields = append(fields, fieldInfo{
Name: name.Name,
TypeName: typeName,
BaseType: baseType,
IsOptional: isOptional,
Name: name.Name,
TypeName: typeName,
BaseType: baseType,
IsOptional: isOptional,
IsComparable: isComparable,
})
}
}

View File

@@ -168,6 +168,91 @@ func TestIsPointerType(t *testing.T) {
}
}
// TestIsComparableType exercises isComparableType across the syntactic
// type categories it distinguishes, by parsing a one-field struct and
// inspecting the field's type expression.
func TestIsComparableType(t *testing.T) {
	tests := []struct {
		name     string
		code     string
		expected bool
	}{
		{
			name:     "basic type - string",
			code:     "type T struct { F string }",
			expected: true,
		},
		{
			name:     "basic type - int",
			code:     "type T struct { F int }",
			expected: true,
		},
		{
			name:     "basic type - bool",
			code:     "type T struct { F bool }",
			expected: true,
		},
		{
			name:     "pointer type",
			code:     "type T struct { F *string }",
			expected: true,
		},
		{
			name:     "slice type - not comparable",
			code:     "type T struct { F []string }",
			expected: false,
		},
		{
			name:     "map type - not comparable",
			code:     "type T struct { F map[string]int }",
			expected: false,
		},
		{
			name:     "array type - comparable if element is",
			code:     "type T struct { F [5]int }",
			expected: true,
		},
		{
			name:     "interface type",
			code:     "type T struct { F interface{} }",
			expected: true,
		},
		{
			name:     "channel type",
			code:     "type T struct { F chan int }",
			expected: true,
		},
		{
			name:     "function type - not comparable",
			code:     "type T struct { F func() }",
			expected: false,
		},
		{
			name:     "struct literal - conservatively not comparable",
			code:     "type T struct { F struct{ X int } }",
			expected: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			fset := token.NewFileSet()
			file, err := parser.ParseFile(fset, "", "package test\n"+tt.code, 0)
			require.NoError(t, err)
			// Walk the AST until the first named field and capture its
			// type expression; stop descending once found.
			var fieldType ast.Expr
			ast.Inspect(file, func(n ast.Node) bool {
				if field, ok := n.(*ast.Field); ok && len(field.Names) > 0 {
					fieldType = field.Type
					return false
				}
				return true
			})
			require.NotNil(t, fieldType)
			result := isComparableType(fieldType)
			assert.Equal(t, tt.expected, result)
		})
	}
}
func TestHasOmitEmpty(t *testing.T) {
tests := []struct {
name string
@@ -337,6 +422,167 @@ type Config struct {
assert.False(t, config.Fields[4].IsOptional, "Required field without omitempty should not be optional")
}
// TestParseFileWithComparableTypes verifies that parseFile populates
// fieldInfo.IsComparable correctly for a struct mixing comparable and
// non-comparable field types.
func TestParseFileWithComparableTypes(t *testing.T) {
	// Create a temporary test file
	tmpDir := t.TempDir()
	testFile := filepath.Join(tmpDir, "test.go")
	testCode := `package testpkg

// fp-go:Lens
type TypeTest struct {
	Name    string
	Age     int
	Pointer *string
	Slice   []string
	Map     map[string]int
	Channel chan int
}
`
	err := os.WriteFile(testFile, []byte(testCode), 0644)
	require.NoError(t, err)
	// Parse the file
	structs, pkg, err := parseFile(testFile)
	require.NoError(t, err)
	// Verify results
	assert.Equal(t, "testpkg", pkg)
	assert.Len(t, structs, 1)
	// Check TypeTest struct
	typeTest := structs[0]
	assert.Equal(t, "TypeTest", typeTest.Name)
	assert.Len(t, typeTest.Fields, 6)
	// Name - string is comparable
	assert.Equal(t, "Name", typeTest.Fields[0].Name)
	assert.Equal(t, "string", typeTest.Fields[0].TypeName)
	assert.False(t, typeTest.Fields[0].IsOptional)
	assert.True(t, typeTest.Fields[0].IsComparable, "string should be comparable")
	// Age - int is comparable
	assert.Equal(t, "Age", typeTest.Fields[1].Name)
	assert.Equal(t, "int", typeTest.Fields[1].TypeName)
	assert.False(t, typeTest.Fields[1].IsOptional)
	assert.True(t, typeTest.Fields[1].IsComparable, "int should be comparable")
	// Pointer - pointer is optional, IsComparable not checked for optional fields
	assert.Equal(t, "Pointer", typeTest.Fields[2].Name)
	assert.Equal(t, "*string", typeTest.Fields[2].TypeName)
	assert.True(t, typeTest.Fields[2].IsOptional)
	// Slice - not comparable
	assert.Equal(t, "Slice", typeTest.Fields[3].Name)
	assert.Equal(t, "[]string", typeTest.Fields[3].TypeName)
	assert.False(t, typeTest.Fields[3].IsOptional)
	assert.False(t, typeTest.Fields[3].IsComparable, "slice should not be comparable")
	// Map - not comparable
	assert.Equal(t, "Map", typeTest.Fields[4].Name)
	assert.Equal(t, "map[string]int", typeTest.Fields[4].TypeName)
	assert.False(t, typeTest.Fields[4].IsOptional)
	assert.False(t, typeTest.Fields[4].IsComparable, "map should not be comparable")
	// Channel - comparable (note: getTypeName returns "any" for channel types, but isComparableType correctly identifies them)
	assert.Equal(t, "Channel", typeTest.Fields[5].Name)
	assert.Equal(t, "any", typeTest.Fields[5].TypeName) // getTypeName doesn't handle chan types specifically
	assert.False(t, typeTest.Fields[5].IsOptional)
	assert.True(t, typeTest.Fields[5].IsComparable, "channel should be comparable")
}

// TestLensRefTemplatesWithComparable verifies that the RefLenses
// constructor template selects MakeLensStrict for comparable fields and
// MakeLensRef for non-comparable ones.
func TestLensRefTemplatesWithComparable(t *testing.T) {
	s := structInfo{
		Name: "TestStruct",
		Fields: []fieldInfo{
			{Name: "Name", TypeName: "string", IsOptional: false, IsComparable: true},
			{Name: "Age", TypeName: "int", IsOptional: false, IsComparable: true},
			{Name: "Data", TypeName: "[]byte", IsOptional: false, IsComparable: false},
			{Name: "Pointer", TypeName: "*string", IsOptional: true, IsComparable: false},
		},
	}
	// Test constructor template for RefLenses
	var constructorBuf bytes.Buffer
	err := constructorTmpl.Execute(&constructorBuf, s)
	require.NoError(t, err)
	constructorStr := constructorBuf.String()
	// Check that MakeLensStrict is used for comparable types in RefLenses
	assert.Contains(t, constructorStr, "func MakeTestStructRefLenses() TestStructRefLenses")
	// Name field - comparable, should use MakeLensStrict
	assert.Contains(t, constructorStr, "Name: L.MakeLensStrict(",
		"comparable field Name should use MakeLensStrict in RefLenses")
	// Age field - comparable, should use MakeLensStrict
	assert.Contains(t, constructorStr, "Age: L.MakeLensStrict(",
		"comparable field Age should use MakeLensStrict in RefLenses")
	// Data field - not comparable, should use MakeLensRef
	assert.Contains(t, constructorStr, "Data: L.MakeLensRef(",
		"non-comparable field Data should use MakeLensRef in RefLenses")
}

// TestGenerateLensHelpersWithComparable runs the full generator on a
// temporary package and checks the emitted lens constructors for both
// comparable and non-comparable fields.
func TestGenerateLensHelpersWithComparable(t *testing.T) {
	// Create a temporary directory with test files
	tmpDir := t.TempDir()
	testCode := `package testpkg

// fp-go:Lens
type TestStruct struct {
	Name  string
	Count int
	Data  []byte
}
`
	testFile := filepath.Join(tmpDir, "test.go")
	err := os.WriteFile(testFile, []byte(testCode), 0644)
	require.NoError(t, err)
	// Generate lens code
	outputFile := "gen.go"
	err = generateLensHelpers(tmpDir, outputFile, false)
	require.NoError(t, err)
	// Verify the generated file exists
	genPath := filepath.Join(tmpDir, outputFile)
	_, err = os.Stat(genPath)
	require.NoError(t, err)
	// Read and verify the generated content
	content, err := os.ReadFile(genPath)
	require.NoError(t, err)
	contentStr := string(content)
	// Check for expected content in RefLenses
	assert.Contains(t, contentStr, "MakeTestStructRefLenses")
	// Name and Count are comparable, should use MakeLensStrict
	assert.Contains(t, contentStr, "L.MakeLensStrict",
		"comparable fields should use MakeLensStrict in RefLenses")
	// Data is not comparable (slice), should use MakeLensRef
	assert.Contains(t, contentStr, "L.MakeLensRef",
		"non-comparable fields should use MakeLensRef in RefLenses")
	// Verify the pattern appears for Name field (comparable)
	namePattern := "Name: L.MakeLensStrict("
	assert.Contains(t, contentStr, namePattern,
		"Name field should use MakeLensStrict")
	// Verify the pattern appears for Data field (not comparable)
	dataPattern := "Data: L.MakeLensRef("
	assert.Contains(t, contentStr, dataPattern,
		"Data field should use MakeLensRef")
}
func TestGenerateLensHelpers(t *testing.T) {
// Create a temporary directory with test files
tmpDir := t.TempDir()

View File

@@ -15,7 +15,84 @@
package eq
// Contramap implements an Equals predicate based on a mapping
// Contramap creates an Eq[B] from an Eq[A] by providing a function that maps B to A.
// This is a contravariant functor operation that allows you to transform equality predicates
// by mapping the input type. It's particularly useful for comparing complex types by
// extracting comparable fields.
//
// The name "contramap" comes from category theory, where it represents a contravariant
// functor. Unlike regular map (covariant), which transforms the output, contramap
// transforms the input in the opposite direction.
//
// Type Parameters:
// - A: The type that has an existing Eq instance
// - B: The type for which we want to create an Eq instance
//
// Parameters:
// - f: A function that extracts or converts a value of type B to type A
//
// Returns:
// - A function that takes an Eq[A] and returns an Eq[B]
//
// The resulting Eq[B] compares two B values by:
// 1. Applying f to both values to get A values
// 2. Using the original Eq[A] to compare those A values
//
// Example - Compare structs by a single field:
//
// type Person struct {
// ID int
// Name string
// Age int
// }
//
// // Compare persons by ID only
// personEqByID := eq.Contramap(func(p Person) int {
// return p.ID
// })(eq.FromStrictEquals[int]())
//
// p1 := Person{ID: 1, Name: "Alice", Age: 30}
// p2 := Person{ID: 1, Name: "Bob", Age: 25}
// assert.True(t, personEqByID.Equals(p1, p2)) // Same ID, different names
//
// Example - Case-insensitive string comparison:
//
// type User struct {
// Username string
// Email string
// }
//
// caseInsensitiveEq := eq.FromEquals(func(a, b string) bool {
// return strings.EqualFold(a, b)
// })
//
// userEqByUsername := eq.Contramap(func(u User) string {
// return u.Username
// })(caseInsensitiveEq)
//
// u1 := User{Username: "Alice", Email: "alice@example.com"}
// u2 := User{Username: "ALICE", Email: "different@example.com"}
// assert.True(t, userEqByUsername.Equals(u1, u2)) // Case-insensitive match
//
// Example - Nested field access:
//
// type Address struct {
// City string
// }
//
// type Person struct {
// Name string
// Address Address
// }
//
// // Compare persons by city
// personEqByCity := eq.Contramap(func(p Person) string {
// return p.Address.City
// })(eq.FromStrictEquals[string]())
//
// Contramap Law:
// Contramap must satisfy: Contramap(f)(Contramap(g)(eq)) = Contramap(g ∘ f)(eq)
// This means contramapping twice is the same as contramapping with the composed function.
func Contramap[A, B any](f func(b B) A) func(Eq[A]) Eq[B] {
return func(fa Eq[A]) Eq[B] {
equals := fa.Equals

View File

@@ -19,38 +19,188 @@ import (
F "github.com/IBM/fp-go/v2/function"
)
// Eq represents an equality type class for type T.
// It provides a way to define custom equality semantics for any type,
// not just those that are comparable with Go's == operator.
//
// Type Parameters:
// - T: The type for which equality is defined
//
// Methods:
// - Equals(x, y T) bool: Returns true if x and y are considered equal
//
// Laws:
// An Eq instance must satisfy the equivalence relation laws:
// 1. Reflexivity: Equals(x, x) = true for all x
// 2. Symmetry: Equals(x, y) = Equals(y, x) for all x, y
// 3. Transitivity: If Equals(x, y) and Equals(y, z), then Equals(x, z)
//
// Example:
//
// // Create an equality predicate for integers
// intEq := eq.FromStrictEquals[int]()
// assert.True(t, intEq.Equals(42, 42))
// assert.False(t, intEq.Equals(42, 43))
//
// // Create a custom equality predicate
// caseInsensitiveEq := eq.FromEquals(func(a, b string) bool {
// return strings.EqualFold(a, b)
// })
// assert.True(t, caseInsensitiveEq.Equals("Hello", "HELLO"))
type Eq[T any] interface {
	// Equals returns true if x and y are considered equal according to this equality predicate.
	//
	// Implementations are typically obtained via FromStrictEquals (for comparable
	// types) or FromEquals (for custom comparison logic) rather than implemented
	// directly.
	//
	// Parameters:
	//   - x: The first value to compare
	//   - y: The second value to compare
	//
	// Returns:
	//   - true if x and y are equal, false otherwise
	Equals(x, y T) bool
}
// eq is the internal implementation of the Eq interface.
// It wraps a comparison function to provide the Eq interface.
type eq[T any] struct {
	// c is the underlying comparison predicate; callers are expected to supply
	// an equivalence relation (reflexive, symmetric, transitive).
	c func(x, y T) bool
}

// Equals implements the Eq interface by delegating to the wrapped comparison function.
func (e eq[T]) Equals(x, y T) bool {
	return e.c(x, y)
}
// strictEq is a helper function that uses Go's built-in == operator for comparison.
// It can only be used with comparable types.
//
// It serves as the canonical comparison passed to FromEquals by FromStrictEquals.
func strictEq[A comparable](a, b A) bool {
	return a == b
}
// FromStrictEquals constructs an [EQ.Eq] from the canonical comparison function
// FromStrictEquals constructs an Eq instance using Go's built-in == operator.
// This is the most common way to create an Eq for types that support ==.
//
// Type Parameters:
// - T: Must be a comparable type (supports ==)
//
// Returns:
// - An Eq[T] that uses == for equality comparison
//
// Example:
//
// intEq := eq.FromStrictEquals[int]()
// assert.True(t, intEq.Equals(42, 42))
// assert.False(t, intEq.Equals(42, 43))
//
// stringEq := eq.FromStrictEquals[string]()
// assert.True(t, stringEq.Equals("hello", "hello"))
// assert.False(t, stringEq.Equals("hello", "world"))
//
// Note: For types that are not comparable or require custom equality logic,
// use FromEquals instead.
func FromStrictEquals[T comparable]() Eq[T] {
	// Delegate to FromEquals, supplying the canonical == comparison.
	return FromEquals[T](strictEq[T])
}
// FromEquals constructs an [EQ.Eq] from the comparison function
// FromEquals constructs an Eq instance from a custom comparison function.
// This allows defining equality for any type, including non-comparable types
// or types that need custom equality semantics.
//
// Type Parameters:
// - T: The type for which equality is being defined (can be any type)
//
// Parameters:
// - c: A function that takes two values of type T and returns true if they are equal
//
// Returns:
// - An Eq[T] that uses the provided function for equality comparison
//
// Example:
//
// // Case-insensitive string equality
// caseInsensitiveEq := eq.FromEquals(func(a, b string) bool {
// return strings.EqualFold(a, b)
// })
// assert.True(t, caseInsensitiveEq.Equals("Hello", "HELLO"))
//
// // Approximate float equality
// approxEq := eq.FromEquals(func(a, b float64) bool {
// return math.Abs(a-b) < 0.0001
// })
// assert.True(t, approxEq.Equals(1.0, 1.00009))
//
// // Custom struct equality (compare by specific fields)
// type Person struct { ID int; Name string }
// personEq := eq.FromEquals(func(a, b Person) bool {
// return a.ID == b.ID // Compare only by ID
// })
//
// Note: The provided function should satisfy the equivalence relation laws
// (reflexivity, symmetry, transitivity) for correct behavior.
func FromEquals[T any](c func(x, y T) bool) Eq[T] {
	// Wrap the predicate in the unexported eq struct, which implements Eq.
	return eq[T]{c: c}
}
// Empty returns the equals predicate that is always true
// Empty returns an Eq instance that always returns true for any comparison.
// This is the identity element for the Eq Monoid and is useful when you need
// an equality predicate that accepts everything.
//
// Type Parameters:
// - T: The type for which the always-true equality is defined
//
// Returns:
// - An Eq[T] where Equals(x, y) always returns true
//
// Example:
//
// alwaysTrue := eq.Empty[int]()
// assert.True(t, alwaysTrue.Equals(1, 2))
// assert.True(t, alwaysTrue.Equals(42, 100))
//
// // Useful as identity in monoid operations
// monoid := eq.Monoid[string]()
// combined := monoid.Concat(eq.FromStrictEquals[string](), monoid.Empty())
// // combined behaves the same as FromStrictEquals
//
// Use cases:
// - As the identity element in Monoid operations
// - When you need a placeholder equality that accepts everything
// - In generic code that requires an Eq but doesn't need actual comparison
func Empty[T any]() Eq[T] {
	// A predicate that accepts any pair of values; this is the identity
	// element for the Eq Monoid (logical AND with `true` is a no-op).
	return FromEquals(func(_, _ T) bool {
		return true
	})
}
// Equals returns a predicate to test if one value equals the other under an equals predicate
// Equals returns a curried equality checking function.
// This is useful for partial application and functional composition.
//
// Type Parameters:
// - T: The type being compared
//
// Parameters:
// - eq: The Eq instance to use for comparison
//
// Returns:
// - A function that takes a value and returns another function that checks equality with that value
//
// Example:
//
// intEq := eq.FromStrictEquals[int]()
// equals42 := eq.Equals(intEq)(42)
//
// assert.True(t, equals42(42))
// assert.False(t, equals42(43))
//
// // Use in higher-order functions
// numbers := []int{40, 41, 42, 43, 44}
// filtered := array.Filter(equals42)(numbers)
// // filtered = [42]
//
// // Partial application
// equalsFunc := eq.Equals(intEq)
// equals10 := equalsFunc(10)
// equals20 := equalsFunc(20)
//
// This is particularly useful when working with functional programming patterns
// like map, filter, and other higher-order functions.
func Equals[T any](eq Eq[T]) func(T) func(T) bool {
return func(other T) func(T) bool {
return F.Bind2nd(eq.Equals, other)

View File

@@ -20,6 +20,65 @@ import (
S "github.com/IBM/fp-go/v2/semigroup"
)
// Semigroup returns a Semigroup instance for Eq[A].
// A Semigroup provides a way to combine two values of the same type.
// For Eq, the combination uses logical AND - two values are equal only if
// they are equal according to BOTH equality predicates.
//
// Type Parameters:
// - A: The type for which equality predicates are being combined
//
// Returns:
// - A Semigroup[Eq[A]] that combines equality predicates with logical AND
//
// The Concat operation satisfies:
// - Associativity: Concat(Concat(x, y), z) = Concat(x, Concat(y, z))
//
// Example - Combine multiple equality checks:
//
// type User struct {
// Username string
// Email string
// }
//
// usernameEq := eq.Contramap(func(u User) string {
// return u.Username
// })(eq.FromStrictEquals[string]())
//
// emailEq := eq.Contramap(func(u User) string {
// return u.Email
// })(eq.FromStrictEquals[string]())
//
// // Users are equal only if BOTH username AND email match
// userEq := eq.Semigroup[User]().Concat(usernameEq, emailEq)
//
// u1 := User{Username: "alice", Email: "alice@example.com"}
// u2 := User{Username: "alice", Email: "alice@example.com"}
// u3 := User{Username: "alice", Email: "different@example.com"}
//
// assert.True(t, userEq.Equals(u1, u2)) // Both match
// assert.False(t, userEq.Equals(u1, u3)) // Email differs
//
// Example - Combine multiple field checks:
//
// type Product struct {
// ID int
// Name string
// Price float64
// }
//
// idEq := eq.Contramap(func(p Product) int { return p.ID })(eq.FromStrictEquals[int]())
// nameEq := eq.Contramap(func(p Product) string { return p.Name })(eq.FromStrictEquals[string]())
// priceEq := eq.Contramap(func(p Product) float64 { return p.Price })(eq.FromStrictEquals[float64]())
//
// sg := eq.Semigroup[Product]()
// // All three fields must match
// productEq := sg.Concat(sg.Concat(idEq, nameEq), priceEq)
//
// Use cases:
// - Combining multiple field comparisons for struct equality
// - Building complex equality predicates from simpler ones
// - Ensuring all conditions are met (logical AND of predicates)
func Semigroup[A any]() S.Semigroup[Eq[A]] {
return S.MakeSemigroup(func(x, y Eq[A]) Eq[A] {
return FromEquals(func(a, b A) bool {
@@ -28,6 +87,67 @@ func Semigroup[A any]() S.Semigroup[Eq[A]] {
})
}
// Monoid returns a Monoid instance for Eq[A].
// A Monoid extends Semigroup with an identity element (Empty).
// For Eq, the identity is an equality predicate that always returns true.
//
// Type Parameters:
// - A: The type for which the equality monoid is defined
//
// Returns:
// - A Monoid[Eq[A]] with:
// - Concat: Combines equality predicates with logical AND (from Semigroup)
// - Empty: An equality predicate that always returns true (identity element)
//
// Monoid Laws:
// 1. Left Identity: Concat(Empty(), x) = x
// 2. Right Identity: Concat(x, Empty()) = x
// 3. Associativity: Concat(Concat(x, y), z) = Concat(x, Concat(y, z))
//
// Example - Using the identity element:
//
// monoid := eq.Monoid[int]()
// intEq := eq.FromStrictEquals[int]()
//
// // Empty is the identity - combining with it doesn't change behavior
// leftIdentity := monoid.Concat(monoid.Empty(), intEq)
// rightIdentity := monoid.Concat(intEq, monoid.Empty())
//
// assert.True(t, leftIdentity.Equals(42, 42))
// assert.False(t, leftIdentity.Equals(42, 43))
// assert.True(t, rightIdentity.Equals(42, 42))
// assert.False(t, rightIdentity.Equals(42, 43))
//
// Example - Empty always returns true:
//
// monoid := eq.Monoid[string]()
// alwaysTrue := monoid.Empty()
//
// assert.True(t, alwaysTrue.Equals("hello", "world"))
// assert.True(t, alwaysTrue.Equals("same", "same"))
// assert.True(t, alwaysTrue.Equals("", "anything"))
//
// Example - Building complex equality with fold:
//
// type Person struct {
// FirstName string
// LastName string
// Age int
// }
//
// firstNameEq := eq.Contramap(func(p Person) string { return p.FirstName })(eq.FromStrictEquals[string]())
// lastNameEq := eq.Contramap(func(p Person) string { return p.LastName })(eq.FromStrictEquals[string]())
// ageEq := eq.Contramap(func(p Person) int { return p.Age })(eq.FromStrictEquals[int]())
//
// monoid := eq.Monoid[Person]()
// // Combine all predicates - all fields must match
// personEq := monoid.Concat(monoid.Concat(firstNameEq, lastNameEq), ageEq)
//
// Use cases:
// - Providing a neutral element for equality combinations
// - Generic algorithms that require a Monoid instance
// - Folding multiple equality predicates into one
// - Default "accept everything" equality predicate
func Monoid[A any]() M.Monoid[Eq[A]] {
	// Concat comes from the Semigroup (logical AND of predicates);
	// Empty is the always-true predicate, which is the identity element.
	return M.MakeMonoid(Semigroup[A]().Concat, Empty[A]())
}

File diff suppressed because it is too large Load Diff

View File

@@ -15,7 +15,105 @@
package function
// Flip reverses the order of parameters of a curried function
// Flip reverses the order of parameters of a curried function.
//
// Given a curried function f that takes T1 then T2 and returns R,
// Flip returns a new curried function that takes T2 then T1 and returns R.
// This is useful when you have a curried function but need to apply its
// arguments in a different order.
//
// Mathematical notation:
// - Given: f: T1 → T2 → R
// - Returns: g: T2 → T1 → R where g(t2)(t1) = f(t1)(t2)
//
// Type Parameters:
// - T1: The type of the first parameter (becomes second after flip)
// - T2: The type of the second parameter (becomes first after flip)
// - R: The return type
//
// Parameters:
// - f: A curried function taking T1 then T2 and returning R
//
// Returns:
// - A new curried function taking T2 then T1 and returning R
//
// Relationship to Swap:
//
// Flip is the curried version of Swap. While Swap works with binary functions,
// Flip works with curried functions:
// - Swap: func(T1, T2) R → func(T2, T1) R
// - Flip: func(T1) func(T2) R → func(T2) func(T1) R
//
// Example - Basic usage:
//
// // Create a curried division function
// divide := Curry2(func(a, b float64) float64 { return a / b })
// // divide(10)(2) = 5.0 (10 / 2)
//
// // Flip the parameter order
// divideFlipped := Flip(divide)
// // divideFlipped(10)(2) = 0.2 (2 / 10)
//
// Example - String formatting:
//
// // Curried string formatter: format(template)(value)
// format := Curry2(func(template, value string) string {
// return fmt.Sprintf(template, value)
// })
//
// // Normal order: template first, then value
// result1 := format("Hello, %s!")("World") // "Hello, World!"
//
// // Flipped order: value first, then template
// formatFlipped := Flip(format)
// result2 := formatFlipped("Hello, %s!")("World") // "Hello, World!"
//
// // Useful for partial application in different order
// greetWorld := format("Hello, %s!")
// greetWorld("Alice") // "Hello, Alice!"
//
// formatAlice := formatFlipped("Alice")
// formatAlice("Hello, %s!") // "Hello, Alice!"
//
// Example - Practical use case with map operations:
//
// // Curried map lookup: getFrom(map)(key)
// getFrom := Curry2(func(m map[string]int, key string) int {
// return m[key]
// })
//
// data := map[string]int{"a": 1, "b": 2, "c": 3}
//
// // Create a getter for this specific map
// getValue := getFrom(data)
// getValue("a") // 1
//
// // Flip to create key-first version: get(key)(map)
// get := Flip(getFrom)
// getA := get("a")
// getA(data) // 1
//
// Example - Combining with other functional patterns:
//
// // Curried append: append(slice)(element)
// appendTo := Curry2(func(slice []int, elem int) []int {
// return append(slice, elem)
// })
//
// // Flip to get: prepend(element)(slice)
// prepend := Flip(appendTo)
//
// nums := []int{1, 2, 3}
// add4 := appendTo(nums)
// result1 := add4(4) // [1, 2, 3, 4]
//
// prependZero := prepend(0)
// result2 := prependZero(nums) // [1, 2, 3, 0]
//
// See also:
// - Swap: For flipping parameters of non-curried binary functions
// - Curry2: For converting binary functions to curried form
// - Uncurry2: For converting curried functions back to binary form
func Flip[T1, T2, R any](f func(T1) func(T2) R) func(T2) func(T1) R {
return func(t2 T2) func(T1) R {
return func(t1 T1) R {

View File

@@ -22,15 +22,265 @@ import (
"github.com/stretchr/testify/assert"
)
// TestFlip tests the Flip function with various scenarios
func TestFlip(t *testing.T) {
t.Run("flips string concatenation", func(t *testing.T) {
// Create a curried function that formats strings
format := Curry2(func(a, b string) string {
return fmt.Sprintf("%s:%s", a, b)
})
x := Curry2(func(a, b string) string {
return fmt.Sprintf("%s:%s", a, b)
// Original order: a then b
assert.Equal(t, "a:b", format("a")("b"))
assert.Equal(t, "hello:world", format("hello")("world"))
// Flipped order: b then a
flipped := Flip(format)
assert.Equal(t, "b:a", flipped("a")("b"))
assert.Equal(t, "world:hello", flipped("hello")("world"))
})
assert.Equal(t, "a:b", x("a")("b"))
t.Run("flips numeric operations", func(t *testing.T) {
// Curried subtraction: subtract(a)(b) = a - b
subtract := Curry2(func(a, b int) int {
return a - b
})
y := Flip(x)
// Original: 10 - 3 = 7
assert.Equal(t, 7, subtract(10)(3))
assert.Equal(t, "b:a", y("a")("b"))
// Flipped: 3 - 10 = -7
flipped := Flip(subtract)
assert.Equal(t, -7, flipped(10)(3))
})
t.Run("flips division", func(t *testing.T) {
// Curried division: divide(a)(b) = a / b
divide := Curry2(func(a, b float64) float64 {
return a / b
})
// Original: 10 / 2 = 5.0
assert.Equal(t, 5.0, divide(10)(2))
// Flipped: 2 / 10 = 0.2
flipped := Flip(divide)
assert.Equal(t, 0.2, flipped(10)(2))
})
t.Run("flips with partial application", func(t *testing.T) {
// Curried append-like operation
prepend := Curry2(func(prefix, text string) string {
return prefix + text
})
// Create specialized functions with original order
addHello := prepend("Hello, ")
assert.Equal(t, "Hello, World", addHello("World"))
assert.Equal(t, "Hello, Go", addHello("Go"))
// Flip and create specialized functions with reversed order
flipped := Flip(prepend)
addToWorld := flipped("World")
assert.Equal(t, "Hello, World", addToWorld("Hello, "))
assert.Equal(t, "Goodbye, World", addToWorld("Goodbye, "))
})
t.Run("flips with different types", func(t *testing.T) {
// Curried function with different input types
repeat := Curry2(func(s string, n int) string {
result := ""
for i := 0; i < n; i++ {
result += s
}
return result
})
// Original: repeat("x")(3) = "xxx"
assert.Equal(t, "xxx", repeat("x")(3))
assert.Equal(t, "abab", repeat("ab")(2))
// Flipped: repeat(3)("x") = "xxx"
flipped := Flip(repeat)
assert.Equal(t, "xxx", flipped(3)("x"))
assert.Equal(t, "abab", flipped(2)("ab"))
})
t.Run("double flip returns to original", func(t *testing.T) {
// Flipping twice should return to original behavior
original := Curry2(func(a, b string) string {
return a + "-" + b
})
flipped := Flip(original)
doubleFlipped := Flip(flipped)
// Original and double-flipped should behave the same
assert.Equal(t, original("a")("b"), doubleFlipped("a")("b"))
assert.Equal(t, "a-b", doubleFlipped("a")("b"))
})
t.Run("flips with complex types", func(t *testing.T) {
type Person struct {
Name string
Age int
}
// Curried function creating a person
makePerson := Curry2(func(name string, age int) Person {
return Person{Name: name, Age: age}
})
// Original order: name then age
alice := makePerson("Alice")(30)
assert.Equal(t, "Alice", alice.Name)
assert.Equal(t, 30, alice.Age)
// Flipped order: age then name
flipped := Flip(makePerson)
bob := flipped(25)("Bob")
assert.Equal(t, "Bob", bob.Name)
assert.Equal(t, 25, bob.Age)
})
t.Run("flips map operations", func(t *testing.T) {
// Curried map getter: get(map)(key)
get := Curry2(func(m map[string]int, key string) int {
return m[key]
})
data := map[string]int{"a": 1, "b": 2, "c": 3}
// Original: provide map first, then key
getValue := get(data)
assert.Equal(t, 1, getValue("a"))
assert.Equal(t, 2, getValue("b"))
// Flipped: provide key first, then map
flipped := Flip(get)
getA := flipped("a")
assert.Equal(t, 1, getA(data))
data2 := map[string]int{"a": 10, "b": 20}
assert.Equal(t, 10, getA(data2))
})
t.Run("flips boolean operations", func(t *testing.T) {
// Curried logical operation
implies := Curry2(func(a, b bool) bool {
return !a || b
})
// Test truth table for implication
assert.True(t, implies(true)(true)) // T → T = T
assert.False(t, implies(true)(false)) // T → F = F
assert.True(t, implies(false)(true)) // F → T = T
assert.True(t, implies(false)(false)) // F → F = T
// Flipped version (reverse implication)
flipped := Flip(implies)
assert.True(t, flipped(true)(true)) // T ← T = T
assert.True(t, flipped(true)(false)) // T ← F = T
assert.False(t, flipped(false)(true)) // F ← T = F
assert.True(t, flipped(false)(false)) // F ← F = T
})
t.Run("flips with slice operations", func(t *testing.T) {
// Curried slice append
appendTo := Curry2(func(slice []int, elem int) []int {
return append(slice, elem)
})
nums := []int{1, 2, 3}
// Original: provide slice first, then element
add4 := appendTo(nums)
result1 := add4(4)
assert.Equal(t, []int{1, 2, 3, 4}, result1)
// Flipped: provide element first, then slice
flipped := Flip(appendTo)
appendFive := flipped(5)
result2 := appendFive(nums)
assert.Equal(t, []int{1, 2, 3, 5}, result2)
})
}
// TestFlipProperties tests mathematical properties of Flip.
func TestFlipProperties(t *testing.T) {
	t.Run("flip is involutive (flip . flip = id)", func(t *testing.T) {
		// Flipping twice must restore the original function's behavior.
		f := Curry2(func(a, b int) int {
			return a*10 + b
		})
		twice := Flip(Flip(f))

		// Exercise a handful of input pairs, including zero and a negative.
		pairs := [][2]int{
			{1, 2},
			{5, 7},
			{0, 0},
			{-1, 3},
		}
		for _, p := range pairs {
			a, b := p[0], p[1]
			assert.Equal(t,
				f(a)(b),
				twice(a)(b),
				"flip(flip(f)) should equal f for inputs (%d, %d)", a, b)
		}
	})

	t.Run("flip preserves function composition", func(t *testing.T) {
		// If we have f: A → B → C and g: C → D
		// then g ∘ f(a)(b) = g(f(a)(b))
		// and g ∘ flip(f)(b)(a) = g(flip(f)(b)(a))
		sum := Curry2(func(a, b int) int {
			return a + b
		})
		double := func(n int) int {
			return n * 2
		}
		flippedSum := Flip(sum)

		// Composing through the original and through the flipped function
		// must agree on the same (swapped) arguments.
		lhs := double(sum(3)(5))
		rhs := double(flippedSum(5)(3))

		assert.Equal(t, lhs, rhs)
		assert.Equal(t, 16, lhs) // (3 + 5) * 2 = 16
	})
}
// BenchmarkFlip benchmarks the Flip function by comparing a curried addition
// against its flipped counterpart.
func BenchmarkFlip(b *testing.B) {
	sum := Curry2(func(x, y int) int {
		return x + y
	})
	swapped := Flip(sum)

	b.Run("original", func(b *testing.B) {
		for i := 0; i < b.N; i++ {
			_ = sum(i)(i + 1)
		}
	})

	b.Run("flipped", func(b *testing.B) {
		for i := 0; i < b.N; i++ {
			_ = swapped(i)(i + 1)
		}
	})
}

233
v2/optics/iso/lens/doc.go Normal file
View File

@@ -0,0 +1,233 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package lens provides conversions from isomorphisms to lenses.
# Overview
This package bridges the gap between isomorphisms (bidirectional transformations)
and lenses (focused accessors). Since every isomorphism can be viewed as a lens,
this package provides functions to perform that conversion.
An isomorphism Iso[S, A] represents a lossless bidirectional transformation between
types S and A. A lens Lens[S, A] provides focused access to a part A within a
structure S. Since an isomorphism can transform the entire structure S to A and back,
it naturally forms a lens that focuses on the "whole as a part".
# Mathematical Foundation
Given an Iso[S, A] with:
- Get: S → A (forward transformation)
- ReverseGet: A → S (reverse transformation)
We can construct a Lens[S, A] with:
- Get: S → A (same as iso's Get)
- Set: A → S → S (implemented as: a => s => ReverseGet(a))
The lens laws are automatically satisfied because the isomorphism laws guarantee:
1. GetSet: Set(Get(s))(s) == s (from iso's round-trip law)
2. SetGet: Get(Set(a)(s)) == a (from iso's inverse law)
3. SetSet: Set(a2)(Set(a1)(s)) == Set(a2)(s) (trivially true)
# Basic Usage
Converting an isomorphism to a lens:
type Celsius float64
type Kelvin float64
// Create an isomorphism between Celsius and Kelvin
celsiusKelvinIso := iso.MakeIso(
func(c Celsius) Kelvin { return Kelvin(c + 273.15) },
func(k Kelvin) Celsius { return Celsius(k - 273.15) },
)
// Convert to a lens
celsiusKelvinLens := lens.IsoAsLens(celsiusKelvinIso)
// Use as a lens
celsius := Celsius(20.0)
kelvin := celsiusKelvinLens.Get(celsius) // 293.15 K
updated := celsiusKelvinLens.Set(Kelvin(300))(celsius) // 26.85°C
# Working with Pointers
For pointer-based structures, use IsoAsLensRef:
type UserId int
type User struct {
id UserId
name string
}
// Isomorphism between User pointer and UserId
userIdIso := iso.MakeIso(
func(u *User) UserId { return u.id },
func(id UserId) *User { return &User{id: id, name: "Unknown"} },
)
// Convert to a reference lens
userIdLens := lens.IsoAsLensRef(userIdIso)
user := &User{id: 42, name: "Alice"}
id := userIdLens.Get(user) // 42
updated := userIdLens.Set(UserId(100))(user) // New user with id 100
# Use Cases
1. Type Wrappers: Convert between newtype wrappers and their underlying types
type Email string
type ValidatedEmail struct{ value Email }
emailIso := iso.MakeIso(
func(ve ValidatedEmail) Email { return ve.value },
func(e Email) ValidatedEmail { return ValidatedEmail{value: e} },
)
emailLens := lens.IsoAsLens(emailIso)
2. Unit Conversions: Work with different units of measurement
type Meters float64
type Feet float64
metersFeetIso := iso.MakeIso(
func(m Meters) Feet { return Feet(m * 3.28084) },
func(f Feet) Meters { return Meters(f / 3.28084) },
)
distanceLens := lens.IsoAsLens(metersFeetIso)
3. Encoding/Decoding: Transform between different representations
type JSON string
type Config struct {
Host string
Port int
}
// Assuming encode/decode functions exist
configIso := iso.MakeIso(encode, decode)
configLens := lens.IsoAsLens(configIso)
# Composition
Lenses created from isomorphisms can be composed with other lenses:
type Temperature struct {
celsius Celsius
}
// Lens to access celsius field
celsiusFieldLens := L.MakeLens(
func(t Temperature) Celsius { return t.celsius },
func(t Temperature, c Celsius) Temperature {
t.celsius = c
return t
},
)
// Compose with iso-based lens to work with Kelvin
tempKelvinLens := F.Pipe1(
celsiusFieldLens,
L.Compose[Temperature](celsiusKelvinLens),
)
temp := Temperature{celsius: 20}
kelvin := tempKelvinLens.Get(temp) // 293.15 K
updated := tempKelvinLens.Set(Kelvin(300))(temp) // 26.85°C
# Comparison with Direct Lenses
While you can create a lens directly, using an isomorphism provides benefits:
1. Reusability: The isomorphism can be used in multiple contexts
2. Bidirectionality: The inverse transformation is explicitly available
3. Type Safety: Isomorphism laws ensure correctness
4. Composability: Isomorphisms compose naturally
Direct lens approach requires defining both get and set operations separately,
while the isomorphism approach defines the bidirectional transformation once
and converts it to a lens when needed.
# Performance Considerations
Converting an isomorphism to a lens has minimal overhead. The resulting lens
simply delegates to the isomorphism's Get and ReverseGet functions. However,
keep in mind:
1. Each Set operation performs a full transformation via ReverseGet
2. For pointer types, use IsoAsLensRef to ensure proper copying
3. The lens ignores the original structure in Set, using only the new value
# Function Reference
Conversion Functions:
- IsoAsLens: Convert Iso[S, A] to Lens[S, A] for value types
- IsoAsLensRef: Convert Iso[*S, A] to Lens[*S, A] for pointer types
# Related Packages
- github.com/IBM/fp-go/v2/optics/iso: Isomorphisms (bidirectional transformations)
- github.com/IBM/fp-go/v2/optics/lens: Lenses (focused accessors)
- github.com/IBM/fp-go/v2/optics/lens/iso: Convert lenses to isomorphisms (inverse operation)
- github.com/IBM/fp-go/v2/endomorphism: Endomorphisms (A → A functions)
- github.com/IBM/fp-go/v2/function: Function composition utilities
# Examples
Complete example with type wrappers:
type UserId int
type Username string
type User struct {
id UserId
name Username
}
// Isomorphism for UserId
userIdIso := iso.MakeIso(
func(u User) UserId { return u.id },
func(id UserId) User { return User{id: id, name: "Unknown"} },
)
// Isomorphism for Username
usernameIso := iso.MakeIso(
func(u User) Username { return u.name },
func(name Username) User { return User{id: 0, name: name} },
)
// Convert to lenses
idLens := lens.IsoAsLens(userIdIso)
nameLens := lens.IsoAsLens(usernameIso)
user := User{id: 42, name: "Alice"}
// Access and modify through lenses
id := idLens.Get(user) // 42
name := nameLens.Get(user) // "Alice"
renamed := nameLens.Set("Bob")(user) // User{id: 0, name: "Bob"}
reidentified := idLens.Set(UserId(100))(user) // User{id: 100, name: "Unknown"}
Note: When using Set with iso-based lenses, the entire structure is replaced
via ReverseGet, so other fields may be reset to default values. For partial
updates, use regular lenses instead.
*/
package lens
// Made with Bob

View File

@@ -18,16 +18,15 @@ package lens
import (
EM "github.com/IBM/fp-go/v2/endomorphism"
F "github.com/IBM/fp-go/v2/function"
I "github.com/IBM/fp-go/v2/optics/iso"
L "github.com/IBM/fp-go/v2/optics/lens"
)
// IsoAsLens converts an `Iso` to a `Lens`
func IsoAsLens[S, A any](sa I.Iso[S, A]) L.Lens[S, A] {
func IsoAsLens[S, A any](sa Iso[S, A]) Lens[S, A] {
return L.MakeLensCurried(sa.Get, F.Flow2(sa.ReverseGet, F.Flow2(F.Constant1[S, S], EM.Of[func(S) S])))
}
// IsoAsLensRef converts an `Iso` to a `Lens`
func IsoAsLensRef[S, A any](sa I.Iso[*S, A]) L.Lens[*S, A] {
func IsoAsLensRef[S, A any](sa Iso[*S, A]) Lens[*S, A] {
return L.MakeLensRefCurried(sa.Get, F.Flow2(sa.ReverseGet, F.Flow2(F.Constant1[*S, *S], EM.Of[func(*S) *S])))
}

View File

@@ -0,0 +1,401 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package lens
import (
"testing"
F "github.com/IBM/fp-go/v2/function"
ISO "github.com/IBM/fp-go/v2/optics/iso"
L "github.com/IBM/fp-go/v2/optics/lens"
"github.com/stretchr/testify/assert"
)
// Test types

// Celsius and Fahrenheit are distinct temperature units used to exercise
// value-type isomorphisms in the tests below.
type Celsius float64
type Fahrenheit float64

// UserId identifies a User; the User pointer type exercises IsoAsLensRef.
type UserId int

type User struct {
	id   UserId
	name string
}

// Meters and Feet are distance units declared for unit-conversion tests.
type Meters float64
type Feet float64
// TestIsoAsLensBasic tests basic functionality of IsoAsLens
func TestIsoAsLensBasic(t *testing.T) {
// Create an isomorphism between Celsius and Fahrenheit
celsiusToFahrenheit := func(c Celsius) Fahrenheit {
return Fahrenheit(c*9/5 + 32)
}
fahrenheitToCelsius := func(f Fahrenheit) Celsius {
return Celsius((f - 32) * 5 / 9)
}
tempIso := ISO.MakeIso(celsiusToFahrenheit, fahrenheitToCelsius)
tempLens := IsoAsLens(tempIso)
t.Run("Get", func(t *testing.T) {
celsius := Celsius(20.0)
fahrenheit := tempLens.Get(celsius)
assert.InDelta(t, 68.0, float64(fahrenheit), 0.001)
})
t.Run("Set", func(t *testing.T) {
celsius := Celsius(20.0)
newFahrenheit := Fahrenheit(86.0)
updated := tempLens.Set(newFahrenheit)(celsius)
assert.InDelta(t, 30.0, float64(updated), 0.001)
})
t.Run("SetPreservesOriginal", func(t *testing.T) {
original := Celsius(20.0)
newFahrenheit := Fahrenheit(86.0)
_ = tempLens.Set(newFahrenheit)(original)
// Original should be unchanged
assert.Equal(t, Celsius(20.0), original)
})
}
// TestIsoAsLensRefBasic tests basic functionality of IsoAsLensRef
func TestIsoAsLensRefBasic(t *testing.T) {
// Create an isomorphism for User pointer and UserId
userToId := func(u *User) UserId {
return u.id
}
idToUser := func(id UserId) *User {
return &User{id: id, name: "Unknown"}
}
userIdIso := ISO.MakeIso(userToId, idToUser)
userIdLens := IsoAsLensRef(userIdIso)
t.Run("Get", func(t *testing.T) {
user := &User{id: 42, name: "Alice"}
id := userIdLens.Get(user)
assert.Equal(t, UserId(42), id)
})
t.Run("Set", func(t *testing.T) {
user := &User{id: 42, name: "Alice"}
newId := UserId(100)
updated := userIdLens.Set(newId)(user)
assert.Equal(t, UserId(100), updated.id)
assert.Equal(t, "Unknown", updated.name) // ReverseGet creates new user
})
t.Run("SetCreatesNewPointer", func(t *testing.T) {
user := &User{id: 42, name: "Alice"}
newId := UserId(100)
updated := userIdLens.Set(newId)(user)
// Should be different pointers
assert.NotSame(t, user, updated)
// Original should be unchanged
assert.Equal(t, UserId(42), user.id)
assert.Equal(t, "Alice", user.name)
})
}
// TestIsoAsLensLaws verifies that IsoAsLens satisfies lens laws
func TestIsoAsLensLaws(t *testing.T) {
// Create a simple isomorphism
type Wrapper struct{ value int }
wrapperIso := ISO.MakeIso(
func(w Wrapper) int { return w.value },
func(i int) Wrapper { return Wrapper{value: i} },
)
lens := IsoAsLens(wrapperIso)
wrapper := Wrapper{value: 42}
newValue := 100
// Law 1: GetSet - lens.Set(lens.Get(s))(s) == s
t.Run("GetSetLaw", func(t *testing.T) {
result := lens.Set(lens.Get(wrapper))(wrapper)
assert.Equal(t, wrapper, result)
})
// Law 2: SetGet - lens.Get(lens.Set(a)(s)) == a
t.Run("SetGetLaw", func(t *testing.T) {
result := lens.Get(lens.Set(newValue)(wrapper))
assert.Equal(t, newValue, result)
})
// Law 3: SetSet - lens.Set(a2)(lens.Set(a1)(s)) == lens.Set(a2)(s)
t.Run("SetSetLaw", func(t *testing.T) {
result1 := lens.Set(200)(lens.Set(newValue)(wrapper))
result2 := lens.Set(200)(wrapper)
assert.Equal(t, result2, result1)
})
}
// TestIsoAsLensRefLaws verifies that IsoAsLensRef satisfies lens laws
func TestIsoAsLensRefLaws(t *testing.T) {
type Wrapper struct{ value int }
wrapperIso := ISO.MakeIso(
func(w *Wrapper) int { return w.value },
func(i int) *Wrapper { return &Wrapper{value: i} },
)
lens := IsoAsLensRef(wrapperIso)
wrapper := &Wrapper{value: 42}
newValue := 100
// Law 1: GetSet - lens.Set(lens.Get(s))(s) == s
t.Run("GetSetLaw", func(t *testing.T) {
result := lens.Set(lens.Get(wrapper))(wrapper)
assert.Equal(t, wrapper.value, result.value)
})
// Law 2: SetGet - lens.Get(lens.Set(a)(s)) == a
t.Run("SetGetLaw", func(t *testing.T) {
result := lens.Get(lens.Set(newValue)(wrapper))
assert.Equal(t, newValue, result)
})
// Law 3: SetSet - lens.Set(a2)(lens.Set(a1)(s)) == lens.Set(a2)(s)
t.Run("SetSetLaw", func(t *testing.T) {
result1 := lens.Set(200)(lens.Set(newValue)(wrapper))
result2 := lens.Set(200)(wrapper)
assert.Equal(t, result2.value, result1.value)
})
}
// TestIsoAsLensComposition tests composing iso-based lenses with other lenses
func TestIsoAsLensComposition(t *testing.T) {
type Temperature struct {
celsius Celsius
}
// Lens to access celsius field
celsiusFieldLens := L.MakeLens(
func(t Temperature) Celsius { return t.celsius },
func(t Temperature, c Celsius) Temperature {
t.celsius = c
return t
},
)
// Isomorphism between Celsius and Fahrenheit
celsiusToFahrenheit := func(c Celsius) Fahrenheit {
return Fahrenheit(c*9/5 + 32)
}
fahrenheitToCelsius := func(f Fahrenheit) Celsius {
return Celsius((f - 32) * 5 / 9)
}
tempIso := ISO.MakeIso(celsiusToFahrenheit, fahrenheitToCelsius)
tempLens := IsoAsLens(tempIso)
// Compose to work with Fahrenheit directly from Temperature
composedLens := F.Pipe1(
celsiusFieldLens,
L.Compose[Temperature](tempLens),
)
temp := Temperature{celsius: 20}
t.Run("ComposedGet", func(t *testing.T) {
fahrenheit := composedLens.Get(temp)
assert.InDelta(t, 68.0, float64(fahrenheit), 0.001)
})
t.Run("ComposedSet", func(t *testing.T) {
newFahrenheit := Fahrenheit(86.0)
updated := composedLens.Set(newFahrenheit)(temp)
assert.InDelta(t, 30.0, float64(updated.celsius), 0.001)
})
}
// TestIsoAsLensModify tests using Modify with iso-based lenses
func TestIsoAsLensModify(t *testing.T) {
// Isomorphism between Meters and Feet
metersToFeet := func(m Meters) Feet {
return Feet(m * 3.28084)
}
feetToMeters := func(f Feet) Meters {
return Meters(f / 3.28084)
}
distanceIso := ISO.MakeIso(metersToFeet, feetToMeters)
distanceLens := IsoAsLens(distanceIso)
meters := Meters(10.0)
t.Run("ModifyDouble", func(t *testing.T) {
// Double the distance in feet, result in meters
doubleFeet := func(f Feet) Feet { return f * 2 }
modified := L.Modify[Meters](doubleFeet)(distanceLens)(meters)
assert.InDelta(t, 20.0, float64(modified), 0.001)
})
t.Run("ModifyIdentity", func(t *testing.T) {
// Identity modification should return same value
identity := func(f Feet) Feet { return f }
modified := L.Modify[Meters](identity)(distanceLens)(meters)
assert.InDelta(t, float64(meters), float64(modified), 0.001)
})
}
// TestIsoAsLensWithIdentityIso tests that identity iso creates identity lens
func TestIsoAsLensWithIdentityIso(t *testing.T) {
type Value int
idIso := ISO.Id[Value]()
idLens := IsoAsLens(idIso)
value := Value(42)
t.Run("IdentityGet", func(t *testing.T) {
result := idLens.Get(value)
assert.Equal(t, value, result)
})
t.Run("IdentitySet", func(t *testing.T) {
newValue := Value(100)
result := idLens.Set(newValue)(value)
assert.Equal(t, newValue, result)
})
}
// TestIsoAsLensRefWithIdentityIso tests identity iso with references
func TestIsoAsLensRefWithIdentityIso(t *testing.T) {
type Value struct{ n int }
idIso := ISO.Id[*Value]()
idLens := IsoAsLensRef(idIso)
value := &Value{n: 42}
t.Run("IdentityGet", func(t *testing.T) {
result := idLens.Get(value)
assert.Equal(t, value, result)
})
t.Run("IdentitySet", func(t *testing.T) {
newValue := &Value{n: 100}
result := idLens.Set(newValue)(value)
assert.Equal(t, newValue, result)
})
}
// TestIsoAsLensRoundTrip tests round-trip conversions
func TestIsoAsLensRoundTrip(t *testing.T) {
type Email string
type ValidatedEmail struct{ value Email }
emailIso := ISO.MakeIso(
func(ve ValidatedEmail) Email { return ve.value },
func(e Email) ValidatedEmail { return ValidatedEmail{value: e} },
)
emailLens := IsoAsLens(emailIso)
validated := ValidatedEmail{value: "user@example.com"}
t.Run("RoundTripThroughGet", func(t *testing.T) {
// Get the email, then Set it back
email := emailLens.Get(validated)
restored := emailLens.Set(email)(validated)
assert.Equal(t, validated, restored)
})
t.Run("RoundTripThroughSet", func(t *testing.T) {
// Set a new email, then Get it
newEmail := Email("admin@example.com")
updated := emailLens.Set(newEmail)(validated)
retrieved := emailLens.Get(updated)
assert.Equal(t, newEmail, retrieved)
})
}
// TestIsoAsLensWithComplexTypes tests with more complex type transformations
func TestIsoAsLensWithComplexTypes(t *testing.T) {
type Point struct {
x, y float64
}
type PolarCoord struct {
r, theta float64
}
// Isomorphism between Cartesian and Polar coordinates (simplified for testing)
cartesianToPolar := func(p Point) PolarCoord {
r := p.x*p.x + p.y*p.y
theta := 0.0 // Simplified
return PolarCoord{r: r, theta: theta}
}
polarToCartesian := func(pc PolarCoord) Point {
return Point{x: pc.r, y: pc.theta} // Simplified
}
coordIso := ISO.MakeIso(cartesianToPolar, polarToCartesian)
coordLens := IsoAsLens(coordIso)
point := Point{x: 3.0, y: 4.0}
t.Run("ComplexGet", func(t *testing.T) {
polar := coordLens.Get(point)
assert.NotNil(t, polar)
})
t.Run("ComplexSet", func(t *testing.T) {
newPolar := PolarCoord{r: 5.0, theta: 0.927}
updated := coordLens.Set(newPolar)(point)
assert.NotNil(t, updated)
})
}
// TestIsoAsLensTypeConversion tests type conversion scenarios
func TestIsoAsLensTypeConversion(t *testing.T) {
type StringWrapper string
type IntWrapper int
// Isomorphism that converts string length to int
strLenIso := ISO.MakeIso(
func(s StringWrapper) IntWrapper { return IntWrapper(len(s)) },
func(i IntWrapper) StringWrapper {
// Create a string of given length (simplified)
result := ""
for j := 0; j < int(i); j++ {
result += "x"
}
return StringWrapper(result)
},
)
strLenLens := IsoAsLens(strLenIso)
t.Run("StringToLength", func(t *testing.T) {
str := StringWrapper("hello")
length := strLenLens.Get(str)
assert.Equal(t, IntWrapper(5), length)
})
t.Run("LengthToString", func(t *testing.T) {
str := StringWrapper("hello")
newLength := IntWrapper(3)
updated := strLenLens.Set(newLength)(str)
assert.Equal(t, 3, len(updated))
})
}
// Made with Bob

View File

@@ -0,0 +1,11 @@
package lens
import (
"github.com/IBM/fp-go/v2/optics/iso"
L "github.com/IBM/fp-go/v2/optics/lens"
)
type (
Lens[S, A any] = L.Lens[S, A]
Iso[S, A any] = iso.Iso[S, A]
)

View File

@@ -24,18 +24,18 @@ import (
)
// FromNillable converts a nillable value to an option and back
func FromNillable[T any]() I.Iso[*T, O.Option[T]] {
func FromNillable[T any]() Iso[*T, Option[T]] {
return I.MakeIso(F.Flow2(
O.FromPredicate(F.IsNonNil[T]),
O.Map(F.Deref[T]),
),
O.Fold(F.Constant((*T)(nil)), F.Ref[T]),
O.Fold(F.ConstNil[T], F.Ref[T]),
)
}
// Compose converts a Lens to a property of `A` into a lens to a property of type `B`
// the transformation is done via an ISO
func Compose[S, A, B any](ab I.Iso[A, B]) func(sa L.Lens[S, A]) L.Lens[S, B] {
func Compose[S, A, B any](ab Iso[A, B]) Operator[S, A, B] {
return F.Pipe2(
ab,
IL.IsoAsLens[A, B],

View File

@@ -0,0 +1,14 @@
package iso
import (
"github.com/IBM/fp-go/v2/optics/iso"
"github.com/IBM/fp-go/v2/optics/lens"
"github.com/IBM/fp-go/v2/option"
)
type (
Option[A any] = option.Option[A]
Iso[S, A any] = iso.Iso[S, A]
Lens[S, A any] = lens.Lens[S, A]
Operator[S, A, B any] = lens.Operator[S, A, B]
)

View File

@@ -435,7 +435,7 @@ func compose[GET ~func(S) B, SET ~func(S, B) S, S, A, B any](creator func(get GE
// person := Person{Name: "Alice", Address: Address{Street: "Main St"}}
// street := personStreetLens.Get(person) // "Main St"
// updated := personStreetLens.Set("Oak Ave")(person)
func Compose[S, A, B any](ab Lens[A, B]) func(Lens[S, A]) Lens[S, B] {
func Compose[S, A, B any](ab Lens[A, B]) Operator[S, A, B] {
return compose(MakeLens[func(S) B, func(S, B) S], ab)
}
@@ -477,7 +477,7 @@ func Compose[S, A, B any](ab Lens[A, B]) func(Lens[S, A]) Lens[S, B] {
// )
//
// personStreetLens := F.Pipe1(addressLens, lens.ComposeRef[Person](streetLens))
func ComposeRef[S, A, B any](ab Lens[A, B]) func(Lens[*S, A]) Lens[*S, B] {
func ComposeRef[S, A, B any](ab Lens[A, B]) Operator[*S, A, B] {
return compose(MakeLensRef[func(*S) B, func(*S, B) *S], ab)
}

View File

@@ -3,6 +3,7 @@ package option
import (
F "github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/optics/lens"
LI "github.com/IBM/fp-go/v2/optics/lens/iso"
O "github.com/IBM/fp-go/v2/option"
)
@@ -95,3 +96,69 @@ func FromOption[S, A any](defaultValue A) func(sa LensO[S, A]) Lens[S, A] {
func FromOptionRef[S, A any](defaultValue A) func(sa Lens[*S, Option[A]]) Lens[*S, A] {
return fromOption(lens.MakeLensRefCurried[S, A], defaultValue)
}
// FromIso converts a Lens[S, A] to a LensO[S, A] using an isomorphism.
//
// This function takes an isomorphism between A and Option[A] and uses it to
// transform a regular lens into an optional lens. It's particularly useful when
// you have a custom isomorphism that defines how to convert between a value
// and its optional representation.
//
// The isomorphism must satisfy the round-trip laws:
// 1. iso.ReverseGet(iso.Get(a)) == a for all a: A
// 2. iso.Get(iso.ReverseGet(opt)) == opt for all opt: Option[A]
//
// Type Parameters:
// - S: The structure type containing the field
// - A: The type of the field being focused on
//
// Parameters:
// - iso: An isomorphism between A and Option[A] that defines the conversion
//
// Returns:
// - A function that takes a Lens[S, A] and returns a LensO[S, A]
//
// Example:
//
// type Config struct {
// timeout int
// }
//
// // Create a lens to the timeout field
// timeoutLens := lens.MakeLens(
// func(c Config) int { return c.timeout },
// func(c Config, t int) Config { c.timeout = t; return c },
// )
//
// // Create an isomorphism that treats 0 as None
// zeroAsNone := iso.MakeIso(
// func(t int) option.Option[int] {
// if t == 0 {
// return option.None[int]()
// }
// return option.Some(t)
// },
// func(opt option.Option[int]) int {
// return option.GetOrElse(func() int { return 0 })(opt)
// },
// )
//
// // Convert to optional lens
// optTimeoutLens := FromIso[Config, int](zeroAsNone)(timeoutLens)
//
// config := Config{timeout: 0}
// opt := optTimeoutLens.Get(config) // None[int]()
// updated := optTimeoutLens.Set(option.Some(30))(config) // Config{timeout: 30}
//
// Common Use Cases:
// - Converting between sentinel values (like 0, -1, "") and Option
// - Applying custom validation logic when converting to/from Option
// - Integrating with existing isomorphisms like FromNillable
//
// See also:
// - FromPredicate: For predicate-based optional conversion
// - FromNillable: For pointer-based optional conversion
// - FromOption: For converting from optional to non-optional with defaults
func FromIso[S, A any](iso Iso[A, Option[A]]) func(Lens[S, A]) LensO[S, A] {
return LI.Compose[S](iso)
}

View File

@@ -0,0 +1,481 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package option
import (
"testing"
EQT "github.com/IBM/fp-go/v2/eq/testing"
F "github.com/IBM/fp-go/v2/function"
ISO "github.com/IBM/fp-go/v2/optics/iso"
L "github.com/IBM/fp-go/v2/optics/lens"
LT "github.com/IBM/fp-go/v2/optics/lens/testing"
O "github.com/IBM/fp-go/v2/option"
"github.com/stretchr/testify/assert"
)
// Test types
type Config struct {
timeout int
retries int
}
type Settings struct {
maxConnections int
bufferSize int
}
// TestFromIsoBasic tests basic functionality of FromIso
func TestFromIsoBasic(t *testing.T) {
// Create an isomorphism that treats 0 as None
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
// Create a lens to the timeout field
timeoutLens := L.MakeLens(
func(c Config) int { return c.timeout },
func(c Config, t int) Config { c.timeout = t; return c },
)
// Convert to optional lens using FromIso
optTimeoutLens := FromIso[Config, int](zeroAsNone)(timeoutLens)
t.Run("GetNone", func(t *testing.T) {
config := Config{timeout: 0, retries: 3}
result := optTimeoutLens.Get(config)
assert.True(t, O.IsNone(result))
})
t.Run("GetSome", func(t *testing.T) {
config := Config{timeout: 30, retries: 3}
result := optTimeoutLens.Get(config)
assert.True(t, O.IsSome(result))
assert.Equal(t, 30, O.GetOrElse(F.Constant(0))(result))
})
t.Run("SetNone", func(t *testing.T) {
config := Config{timeout: 30, retries: 3}
updated := optTimeoutLens.Set(O.None[int]())(config)
assert.Equal(t, 0, updated.timeout)
assert.Equal(t, 3, updated.retries) // Other fields unchanged
})
t.Run("SetSome", func(t *testing.T) {
config := Config{timeout: 0, retries: 3}
updated := optTimeoutLens.Set(O.Some(60))(config)
assert.Equal(t, 60, updated.timeout)
assert.Equal(t, 3, updated.retries) // Other fields unchanged
})
t.Run("SetPreservesOriginal", func(t *testing.T) {
original := Config{timeout: 30, retries: 3}
_ = optTimeoutLens.Set(O.Some(60))(original)
// Original should be unchanged
assert.Equal(t, 30, original.timeout)
assert.Equal(t, 3, original.retries)
})
}
// TestFromIsoWithNegativeSentinel tests using -1 as a sentinel value
func TestFromIsoWithNegativeSentinel(t *testing.T) {
// Create an isomorphism that treats -1 as None
negativeOneAsNone := ISO.MakeIso(
func(n int) O.Option[int] {
if n == -1 {
return O.None[int]()
}
return O.Some(n)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(-1))(opt)
},
)
retriesLens := L.MakeLens(
func(c Config) int { return c.retries },
func(c Config, r int) Config { c.retries = r; return c },
)
optRetriesLens := FromIso[Config, int](negativeOneAsNone)(retriesLens)
t.Run("GetNoneForNegativeOne", func(t *testing.T) {
config := Config{timeout: 30, retries: -1}
result := optRetriesLens.Get(config)
assert.True(t, O.IsNone(result))
})
t.Run("GetSomeForZero", func(t *testing.T) {
config := Config{timeout: 30, retries: 0}
result := optRetriesLens.Get(config)
assert.True(t, O.IsSome(result))
assert.Equal(t, 0, O.GetOrElse(F.Constant(-1))(result))
})
t.Run("SetNoneToNegativeOne", func(t *testing.T) {
config := Config{timeout: 30, retries: 5}
updated := optRetriesLens.Set(O.None[int]())(config)
assert.Equal(t, -1, updated.retries)
})
}
// TestFromIsoLaws verifies that FromIso satisfies lens laws
func TestFromIsoLaws(t *testing.T) {
// Create an isomorphism
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
timeoutLens := L.MakeLens(
func(c Config) int { return c.timeout },
func(c Config, t int) Config { c.timeout = t; return c },
)
optTimeoutLens := FromIso[Config, int](zeroAsNone)(timeoutLens)
eqOptInt := O.Eq(EQT.Eq[int]())
eqConfig := EQT.Eq[Config]()
config := Config{timeout: 30, retries: 3}
newValue := O.Some(60)
// Law 1: GetSet - lens.Set(lens.Get(s))(s) == s
t.Run("GetSetLaw", func(t *testing.T) {
result := optTimeoutLens.Set(optTimeoutLens.Get(config))(config)
assert.True(t, eqConfig.Equals(config, result))
})
// Law 2: SetGet - lens.Get(lens.Set(a)(s)) == a
t.Run("SetGetLaw", func(t *testing.T) {
result := optTimeoutLens.Get(optTimeoutLens.Set(newValue)(config))
assert.True(t, eqOptInt.Equals(newValue, result))
})
// Law 3: SetSet - lens.Set(a2)(lens.Set(a1)(s)) == lens.Set(a2)(s)
t.Run("SetSetLaw", func(t *testing.T) {
a1 := O.Some(60)
a2 := O.None[int]()
result1 := optTimeoutLens.Set(a2)(optTimeoutLens.Set(a1)(config))
result2 := optTimeoutLens.Set(a2)(config)
assert.True(t, eqConfig.Equals(result1, result2))
})
// Use the testing helper to verify all laws
t.Run("AllLaws", func(t *testing.T) {
laws := LT.AssertLaws(t, eqOptInt, eqConfig)(optTimeoutLens)
assert.True(t, laws(config, O.Some(100)))
assert.True(t, laws(Config{timeout: 0, retries: 5}, O.None[int]()))
})
}
// TestFromIsoComposition tests composing FromIso with other lenses
func TestFromIsoComposition(t *testing.T) {
type Application struct {
config Config
}
// Isomorphism for zero as none
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
// Lens to config field
configLens := L.MakeLens(
func(a Application) Config { return a.config },
func(a Application, c Config) Application { a.config = c; return a },
)
// Lens to timeout field
timeoutLens := L.MakeLens(
func(c Config) int { return c.timeout },
func(c Config, t int) Config { c.timeout = t; return c },
)
// Compose: Application -> Config -> timeout (as Option)
optTimeoutFromConfig := FromIso[Config, int](zeroAsNone)(timeoutLens)
optTimeoutFromApp := F.Pipe1(
configLens,
L.Compose[Application](optTimeoutFromConfig),
)
app := Application{config: Config{timeout: 0, retries: 3}}
t.Run("ComposedGet", func(t *testing.T) {
result := optTimeoutFromApp.Get(app)
assert.True(t, O.IsNone(result))
})
t.Run("ComposedSet", func(t *testing.T) {
updated := optTimeoutFromApp.Set(O.Some(45))(app)
assert.Equal(t, 45, updated.config.timeout)
assert.Equal(t, 3, updated.config.retries)
})
}
// TestFromIsoModify tests using Modify with FromIso-based lenses
func TestFromIsoModify(t *testing.T) {
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
timeoutLens := L.MakeLens(
func(c Config) int { return c.timeout },
func(c Config, t int) Config { c.timeout = t; return c },
)
optTimeoutLens := FromIso[Config, int](zeroAsNone)(timeoutLens)
t.Run("ModifyNoneToSome", func(t *testing.T) {
config := Config{timeout: 0, retries: 3}
// Map None to Some(10)
modified := L.Modify[Config](O.Map(func(x int) int { return x + 10 }))(optTimeoutLens)(config)
// Since it was None, Map doesn't apply, stays None (0)
assert.Equal(t, 0, modified.timeout)
})
t.Run("ModifySomeValue", func(t *testing.T) {
config := Config{timeout: 30, retries: 3}
// Double the timeout value
modified := L.Modify[Config](O.Map(func(x int) int { return x * 2 }))(optTimeoutLens)(config)
assert.Equal(t, 60, modified.timeout)
})
t.Run("ModifyWithAlt", func(t *testing.T) {
config := Config{timeout: 0, retries: 3}
// Use Alt to provide a default
modified := L.Modify[Config](func(opt O.Option[int]) O.Option[int] {
return O.Alt(F.Constant(O.Some(10)))(opt)
})(optTimeoutLens)(config)
assert.Equal(t, 10, modified.timeout)
})
}
// TestFromIsoWithStringEmpty tests using empty string as None
func TestFromIsoWithStringEmpty(t *testing.T) {
type User struct {
name string
email string
}
// Isomorphism that treats empty string as None
emptyAsNone := ISO.MakeIso(
func(s string) O.Option[string] {
if s == "" {
return O.None[string]()
}
return O.Some(s)
},
func(opt O.Option[string]) string {
return O.GetOrElse(F.Constant(""))(opt)
},
)
emailLens := L.MakeLens(
func(u User) string { return u.email },
func(u User, e string) User { u.email = e; return u },
)
optEmailLens := FromIso[User, string](emptyAsNone)(emailLens)
t.Run("EmptyStringAsNone", func(t *testing.T) {
user := User{name: "Alice", email: ""}
result := optEmailLens.Get(user)
assert.True(t, O.IsNone(result))
})
t.Run("NonEmptyStringAsSome", func(t *testing.T) {
user := User{name: "Alice", email: "alice@example.com"}
result := optEmailLens.Get(user)
assert.True(t, O.IsSome(result))
assert.Equal(t, "alice@example.com", O.GetOrElse(F.Constant(""))(result))
})
t.Run("SetNoneToEmpty", func(t *testing.T) {
user := User{name: "Alice", email: "alice@example.com"}
updated := optEmailLens.Set(O.None[string]())(user)
assert.Equal(t, "", updated.email)
})
}
// TestFromIsoRoundTrip tests round-trip conversions
func TestFromIsoRoundTrip(t *testing.T) {
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
maxConnectionsLens := L.MakeLens(
func(s Settings) int { return s.maxConnections },
func(s Settings, m int) Settings { s.maxConnections = m; return s },
)
optMaxConnectionsLens := FromIso[Settings, int](zeroAsNone)(maxConnectionsLens)
t.Run("RoundTripThroughGet", func(t *testing.T) {
settings := Settings{maxConnections: 100, bufferSize: 1024}
// Get the value, then Set it back
opt := optMaxConnectionsLens.Get(settings)
restored := optMaxConnectionsLens.Set(opt)(settings)
assert.Equal(t, settings, restored)
})
t.Run("RoundTripThroughSet", func(t *testing.T) {
settings := Settings{maxConnections: 0, bufferSize: 1024}
// Set a new value, then Get it
newOpt := O.Some(200)
updated := optMaxConnectionsLens.Set(newOpt)(settings)
retrieved := optMaxConnectionsLens.Get(updated)
assert.True(t, O.Eq(EQT.Eq[int]()).Equals(newOpt, retrieved))
})
t.Run("RoundTripWithNone", func(t *testing.T) {
settings := Settings{maxConnections: 100, bufferSize: 1024}
// Set None, then get it back
updated := optMaxConnectionsLens.Set(O.None[int]())(settings)
retrieved := optMaxConnectionsLens.Get(updated)
assert.True(t, O.IsNone(retrieved))
})
}
// TestFromIsoChaining tests chaining multiple FromIso transformations
func TestFromIsoChaining(t *testing.T) {
// Create two different isomorphisms
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
timeoutLens := L.MakeLens(
func(c Config) int { return c.timeout },
func(c Config, t int) Config { c.timeout = t; return c },
)
optTimeoutLens := FromIso[Config, int](zeroAsNone)(timeoutLens)
config := Config{timeout: 30, retries: 3}
t.Run("ChainedOperations", func(t *testing.T) {
// Chain multiple operations
result := F.Pipe2(
config,
optTimeoutLens.Set(O.Some(60)),
optTimeoutLens.Set(O.None[int]()),
)
assert.Equal(t, 0, result.timeout)
})
}
// TestFromIsoMultipleFields tests using FromIso on multiple fields
func TestFromIsoMultipleFields(t *testing.T) {
zeroAsNone := ISO.MakeIso(
func(t int) O.Option[int] {
if t == 0 {
return O.None[int]()
}
return O.Some(t)
},
func(opt O.Option[int]) int {
return O.GetOrElse(F.Constant(0))(opt)
},
)
timeoutLens := L.MakeLens(
func(c Config) int { return c.timeout },
func(c Config, t int) Config { c.timeout = t; return c },
)
retriesLens := L.MakeLens(
func(c Config) int { return c.retries },
func(c Config, r int) Config { c.retries = r; return c },
)
optTimeoutLens := FromIso[Config, int](zeroAsNone)(timeoutLens)
optRetriesLens := FromIso[Config, int](zeroAsNone)(retriesLens)
t.Run("IndependentFields", func(t *testing.T) {
config := Config{timeout: 0, retries: 5}
// Get both fields
timeoutOpt := optTimeoutLens.Get(config)
retriesOpt := optRetriesLens.Get(config)
assert.True(t, O.IsNone(timeoutOpt))
assert.True(t, O.IsSome(retriesOpt))
assert.Equal(t, 5, O.GetOrElse(F.Constant(0))(retriesOpt))
})
t.Run("SetBothFields", func(t *testing.T) {
config := Config{timeout: 0, retries: 0}
// Set both fields
updated := F.Pipe2(
config,
optTimeoutLens.Set(O.Some(30)),
optRetriesLens.Set(O.Some(3)),
)
assert.Equal(t, 30, updated.timeout)
assert.Equal(t, 3, updated.retries)
})
}
// Made with Bob

View File

@@ -17,6 +17,7 @@ package option
import (
"github.com/IBM/fp-go/v2/endomorphism"
"github.com/IBM/fp-go/v2/optics/iso"
"github.com/IBM/fp-go/v2/optics/lens"
"github.com/IBM/fp-go/v2/option"
)
@@ -91,4 +92,6 @@ type (
// optLens := lens.FromNillableRef(timeoutLens)
// // optLens is a LensO[*Config, *int]
LensO[S, A any] = Lens[S, Option[A]]
Iso[S, A any] = iso.Iso[S, A]
)

View File

@@ -80,4 +80,7 @@ type (
// with the focused value updated to a. The original structure is never modified.
Set func(a A) Endomorphism[S]
}
Kleisli[S, A, B any] = func(A) Lens[S, B]
Operator[S, A, B any] = Kleisli[S, Lens[S, A], B]
)

View File

@@ -17,6 +17,8 @@ package lens
import "github.com/IBM/fp-go/v2/optics/lens/option"
//go:generate go run ../../main.go lens --dir . --filename gen_lens.go
// fp-go:Lens
type Person struct {
Name string

View File

@@ -20,6 +20,7 @@ import (
F "github.com/IBM/fp-go/v2/function"
L "github.com/IBM/fp-go/v2/optics/lens"
O "github.com/IBM/fp-go/v2/option"
"github.com/stretchr/testify/assert"
)
@@ -153,3 +154,190 @@ func TestLensComposition(t *testing.T) {
assert.Equal(t, 55, olderCEO.CEO.Age)
assert.Equal(t, 50, company.CEO.Age) // Original unchanged
}
func TestPersonRefLensesIdempotent(t *testing.T) {
// Create a person pointer
person := &Person{
Name: "Alice",
Age: 30,
Email: "alice@example.com",
}
// Create ref lenses
refLenses := MakePersonRefLenses()
// Test that setting the same value returns the identical pointer (idempotent)
// This works because Name, Age, and Email use MakeLensStrict which has equality optimization
// Test Name field - setting same value should return same pointer
sameName := refLenses.Name.Set("Alice")(person)
assert.Same(t, person, sameName, "Setting Name to same value should return identical pointer")
// Test Age field - setting same value should return same pointer
sameAge := refLenses.Age.Set(30)(person)
assert.Same(t, person, sameAge, "Setting Age to same value should return identical pointer")
// Test Email field - setting same value should return same pointer
sameEmail := refLenses.Email.Set("alice@example.com")(person)
assert.Same(t, person, sameEmail, "Setting Email to same value should return identical pointer")
// Test that setting a different value creates a new pointer
differentName := refLenses.Name.Set("Bob")(person)
assert.NotSame(t, person, differentName, "Setting Name to different value should return new pointer")
assert.Equal(t, "Bob", differentName.Name)
assert.Equal(t, "Alice", person.Name, "Original should be unchanged")
differentAge := refLenses.Age.Set(31)(person)
assert.NotSame(t, person, differentAge, "Setting Age to different value should return new pointer")
assert.Equal(t, 31, differentAge.Age)
assert.Equal(t, 30, person.Age, "Original should be unchanged")
differentEmail := refLenses.Email.Set("bob@example.com")(person)
assert.NotSame(t, person, differentEmail, "Setting Email to different value should return new pointer")
assert.Equal(t, "bob@example.com", differentEmail.Email)
assert.Equal(t, "alice@example.com", person.Email, "Original should be unchanged")
}
func TestPersonRefLensesOptionalIdempotent(t *testing.T) {
// Test that setting an optional field to the same value returns the identical pointer
// This is important for performance and correctness in functional programming
// Test with Phone field set to a value
phoneValue := "555-1234"
person := &Person{
Name: "Alice",
Age: 30,
Email: "alice@example.com",
Phone: &phoneValue,
}
refLenses := MakePersonRefLenses()
// Test that setting Phone to the same value returns the same pointer
samePhone := refLenses.Phone.Set(O.Some(&phoneValue))(person)
assert.Same(t, person, samePhone, "Setting Phone to same value should return identical pointer")
// Test with Phone field set to nil
personNoPhone := &Person{
Name: "Bob",
Age: 25,
Email: "bob@example.com",
Phone: nil,
}
// Setting Phone to None when it's already nil should return same pointer
sameNilPhone := refLenses.Phone.Set(O.None[*string]())(personNoPhone)
assert.Same(t, personNoPhone, sameNilPhone, "Setting Phone to None when already nil should return identical pointer")
// Test that setting to a different value creates a new pointer
newPhoneValue := "555-5678"
differentPhone := refLenses.Phone.Set(O.Some(&newPhoneValue))(person)
assert.NotSame(t, person, differentPhone, "Setting Phone to different value should return new pointer")
assert.Equal(t, &newPhoneValue, differentPhone.Phone)
assert.Equal(t, &phoneValue, person.Phone, "Original should be unchanged")
// Test setting from nil to Some creates new pointer
somePhone := refLenses.Phone.Set(O.Some(&phoneValue))(personNoPhone)
assert.NotSame(t, personNoPhone, somePhone, "Setting Phone from nil to Some should return new pointer")
assert.Equal(t, &phoneValue, somePhone.Phone)
assert.Nil(t, personNoPhone.Phone, "Original should be unchanged")
// Test setting from Some to None creates new pointer
nonePhone := refLenses.Phone.Set(O.None[*string]())(person)
assert.NotSame(t, person, nonePhone, "Setting Phone from Some to None should return new pointer")
assert.Nil(t, nonePhone.Phone)
assert.Equal(t, &phoneValue, person.Phone, "Original should be unchanged")
}
func TestAddressRefLensesOptionalIdempotent(t *testing.T) {
	// Verify the State optional-field lens on *Address: writing an equal
	// value must hand back the original pointer (idempotent set), while a
	// genuinely different value must produce a fresh copy and leave the
	// original struct untouched.
	state := "California"
	withState := &Address{
		Street:  "123 Main St",
		City:    "Los Angeles",
		ZipCode: "90001",
		Country: "USA",
		State:   &state,
	}
	lenses := MakeAddressRefLenses()

	// Re-setting the identical Some value is pointer-wise a no-op.
	unchanged := lenses.State.Set(O.Some(&state))(withState)
	assert.Same(t, withState, unchanged, "Setting State to same value should return identical pointer")

	withoutState := &Address{
		Street:  "456 Oak Ave",
		City:    "Boston",
		ZipCode: "02101",
		Country: "USA",
		State:   nil,
	}

	// Clearing an already-absent State is likewise pointer-wise a no-op.
	stillNil := lenses.State.Set(O.None[*string]())(withoutState)
	assert.Same(t, withoutState, stillNil, "Setting State to None when already nil should return identical pointer")

	// Writing a different value must copy rather than mutate the original.
	replacement := "New York"
	updated := lenses.State.Set(O.Some(&replacement))(withState)
	assert.NotSame(t, withState, updated, "Setting State to different value should return new pointer")
	assert.Equal(t, &replacement, updated.State)
	assert.Equal(t, &state, withState.State, "Original should be unchanged")
}
func TestCompanyRefLensesOptionalIdempotent(t *testing.T) {
	// Verify the Website optional-field lens on *Company: setting an equal
	// value must return the original pointer (idempotent set), while a
	// different value must yield a fresh copy and leave the original
	// struct unchanged.
	website := "https://example.com"
	withSite := &Company{
		Name: "Tech Inc",
		Address: Address{
			Street:  "789 Tech Blvd",
			City:    "San Francisco",
			ZipCode: "94102",
			Country: "USA",
		},
		CEO: Person{
			Name:  "Jane Doe",
			Age:   45,
			Email: "jane@techinc.com",
		},
		Website: &website,
	}
	lenses := MakeCompanyRefLenses()

	// Re-setting the identical Some value is pointer-wise a no-op.
	unchanged := lenses.Website.Set(O.Some(&website))(withSite)
	assert.Same(t, withSite, unchanged, "Setting Website to same value should return identical pointer")

	withoutSite := &Company{
		Name: "Startup LLC",
		Address: Address{
			Street:  "101 Innovation Way",
			City:    "Austin",
			ZipCode: "78701",
			Country: "USA",
		},
		CEO: Person{
			Name:  "John Smith",
			Age:   35,
			Email: "john@startup.com",
		},
	}

	// Clearing an already-absent Website is likewise pointer-wise a no-op.
	stillNil := lenses.Website.Set(O.None[*string]())(withoutSite)
	assert.Same(t, withoutSite, stillNil, "Setting Website to None when already nil should return identical pointer")

	// Writing a different value must copy rather than mutate the original.
	replacement := "https://newsite.com"
	updated := lenses.Website.Set(O.Some(&replacement))(withSite)
	assert.NotSame(t, withSite, updated, "Setting Website to different value should return new pointer")
	assert.Equal(t, &replacement, updated.Website)
	assert.Equal(t, &website, withSite.Website, "Original should be unchanged")
}

View File

@@ -2,7 +2,7 @@ package lens
// Code generated by go generate; DO NOT EDIT.
// This file was generated by robots at
// 2025-11-07 16:52:17.4935733 +0100 CET m=+0.003883901
// 2025-11-12 18:15:07.69943 +0100 CET m=+0.005345401
import (
L "github.com/IBM/fp-go/v2/optics/lens"
@@ -53,24 +53,23 @@ func MakePersonLenses() PersonLenses {
// MakePersonRefLenses creates a new PersonRefLenses with lenses for all fields
func MakePersonRefLenses() PersonRefLenses {
isoPhone := I.FromZero[*string]()
return PersonRefLenses{
Name: L.MakeLensRef(
Name: L.MakeLensStrict(
func(s *Person) string { return s.Name },
func(s *Person, v string) *Person { s.Name = v; return s },
),
Age: L.MakeLensRef(
Age: L.MakeLensStrict(
func(s *Person) int { return s.Age },
func(s *Person, v int) *Person { s.Age = v; return s },
),
Email: L.MakeLensRef(
Email: L.MakeLensStrict(
func(s *Person) string { return s.Email },
func(s *Person, v string) *Person { s.Email = v; return s },
),
Phone: L.MakeLensRef(
func(s *Person) O.Option[*string] { return isoPhone.Get(s.Phone) },
func(s *Person, v O.Option[*string]) *Person { s.Phone = isoPhone.ReverseGet(v); return s },
),
Phone: LO.FromIso[*Person](I.FromZero[*string]())(L.MakeLensStrict(
func(s *Person) *string { return s.Phone },
func(s *Person, v *string) *Person { s.Phone = v; return s },
)),
}
}
@@ -121,28 +120,27 @@ func MakeAddressLenses() AddressLenses {
// MakeAddressRefLenses creates a new AddressRefLenses with lenses for all fields
func MakeAddressRefLenses() AddressRefLenses {
isoState := I.FromZero[*string]()
return AddressRefLenses{
Street: L.MakeLensRef(
Street: L.MakeLensStrict(
func(s *Address) string { return s.Street },
func(s *Address, v string) *Address { s.Street = v; return s },
),
City: L.MakeLensRef(
City: L.MakeLensStrict(
func(s *Address) string { return s.City },
func(s *Address, v string) *Address { s.City = v; return s },
),
ZipCode: L.MakeLensRef(
ZipCode: L.MakeLensStrict(
func(s *Address) string { return s.ZipCode },
func(s *Address, v string) *Address { s.ZipCode = v; return s },
),
Country: L.MakeLensRef(
Country: L.MakeLensStrict(
func(s *Address) string { return s.Country },
func(s *Address, v string) *Address { s.Country = v; return s },
),
State: L.MakeLensRef(
func(s *Address) O.Option[*string] { return isoState.Get(s.State) },
func(s *Address, v O.Option[*string]) *Address { s.State = isoState.ReverseGet(v); return s },
),
State: LO.FromIso[*Address](I.FromZero[*string]())(L.MakeLensStrict(
func(s *Address) *string { return s.State },
func(s *Address, v *string) *Address { s.State = v; return s },
)),
}
}
@@ -187,24 +185,23 @@ func MakeCompanyLenses() CompanyLenses {
// MakeCompanyRefLenses creates a new CompanyRefLenses with lenses for all fields
func MakeCompanyRefLenses() CompanyRefLenses {
isoWebsite := I.FromZero[*string]()
return CompanyRefLenses{
Name: L.MakeLensRef(
Name: L.MakeLensStrict(
func(s *Company) string { return s.Name },
func(s *Company, v string) *Company { s.Name = v; return s },
),
Address: L.MakeLensRef(
Address: L.MakeLensStrict(
func(s *Company) Address { return s.Address },
func(s *Company, v Address) *Company { s.Address = v; return s },
),
CEO: L.MakeLensRef(
CEO: L.MakeLensStrict(
func(s *Company) Person { return s.CEO },
func(s *Company, v Person) *Company { s.CEO = v; return s },
),
Website: L.MakeLensRef(
func(s *Company) O.Option[*string] { return isoWebsite.Get(s.Website) },
func(s *Company, v O.Option[*string]) *Company { s.Website = isoWebsite.ReverseGet(v); return s },
),
Website: LO.FromIso[*Company](I.FromZero[*string]())(L.MakeLensStrict(
func(s *Company) *string { return s.Website },
func(s *Company, v *string) *Company { s.Website = v; return s },
)),
}
}
@@ -237,15 +234,14 @@ func MakeCheckOptionLenses() CheckOptionLenses {
// MakeCheckOptionRefLenses creates a new CheckOptionRefLenses with lenses for all fields
func MakeCheckOptionRefLenses() CheckOptionRefLenses {
isoValue := I.FromZero[string]()
return CheckOptionRefLenses{
Name: L.MakeLensRef(
func(s *CheckOption) option.Option[string] { return s.Name },
func(s *CheckOption, v option.Option[string]) *CheckOption { s.Name = v; return s },
),
Value: L.MakeLensRef(
func(s *CheckOption) O.Option[string] { return isoValue.Get(s.Value) },
func(s *CheckOption, v O.Option[string]) *CheckOption { s.Value = isoValue.ReverseGet(v); return s },
),
Value: LO.FromIso[*CheckOption](I.FromZero[string]())(L.MakeLensStrict(
func(s *CheckOption) string { return s.Value },
func(s *CheckOption, v string) *CheckOption { s.Value = v; return s },
)),
}
}