1
0
mirror of https://github.com/IBM/fp-go.git synced 2026-03-10 13:31:01 +02:00

Compare commits

...

11 Commits

Author SHA1 Message Date
Dr. Carsten Leue
a0910b8279 fix: add -coverpkg=./... to v2
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-08 23:34:17 +01:00
Dr. Carsten Leue
029d7be52d fix: better collection of coverage results
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-08 23:32:14 +01:00
Dr. Carsten Leue
c6d30bb642 fix: increase test timeout
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-08 23:21:27 +01:00
Dr. Carsten Leue
1821f00fbe fix: introduce effect.LocalReaderK
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-08 22:52:20 +01:00
Dr. Carsten Leue
f0ec0b2541 fix: optimize record performance
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-08 22:20:19 +01:00
Dr. Carsten Leue
ce3c7d9359 fix: documentation of endomorphism
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-08 22:02:11 +01:00
Dr. Carsten Leue
3ed354cc8c fix: implement endomorphism.Read
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-08 19:01:22 +01:00
Dr. Carsten Leue
0932c8c464 fix: add tests for totality and move skills
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-08 14:12:41 +01:00
Dr. Carsten Leue
475d09e987 fix: add skills
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-07 22:39:33 +01:00
Dr. Carsten Leue
fd21bdeabf fix: signature of local for context/readerresult
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-07 22:03:17 +01:00
Dr. Carsten Leue
6834f72856 fix: make signature of Local for context more generic, but backwards compatible
Signed-off-by: Dr. Carsten Leue <carsten.leue@de.ibm.com>
2026-03-07 21:02:24 +01:00
29 changed files with 3829 additions and 99 deletions

View File

@@ -39,7 +39,7 @@ jobs:
- name: Run tests
run: |
go mod tidy
go test -v -race -coverprofile=coverage.txt -covermode=atomic ./...
go test -v -race -coverprofile=coverage.txt -covermode=atomic -coverpkg=./... ./...
- name: Upload coverage to Coveralls
continue-on-error: true
@@ -79,7 +79,7 @@ jobs:
run: |
cd v2
go mod tidy
go test -v -race -coverprofile=coverage.txt -covermode=atomic ./...
go test -v -race -coverprofile=coverage.txt -covermode=atomic -coverpkg=./... ./...
- name: Upload coverage to Coveralls
continue-on-error: true

318
skills/fp-go-http/SKILL.md Normal file
View File

@@ -0,0 +1,318 @@
# fp-go HTTP Requests
## Overview
fp-go wraps `net/http` in the `ReaderIOResult` monad, giving you composable, context-aware HTTP operations with automatic error propagation. The core package is:
```
github.com/IBM/fp-go/v2/context/readerioresult/http
```
All HTTP operations are lazy — they describe what to do but do not execute until you call the resulting function with a `context.Context`.
## Core Types
```go
// Requester builds an *http.Request given a context.
type Requester = ReaderIOResult[*http.Request] // func(context.Context) func() result.Result[*http.Request]
// Client executes a Requester and returns the response wrapped in ReaderIOResult.
type Client interface {
Do(Requester) ReaderIOResult[*http.Response]
}
```
## Basic Usage
### 1. Create a Client
```go
import (
HTTP "net/http"
H "github.com/IBM/fp-go/v2/context/readerioresult/http"
)
client := H.MakeClient(HTTP.DefaultClient)
// Or with a custom client:
custom := &HTTP.Client{Timeout: 10 * time.Second}
client := H.MakeClient(custom)
```
### 2. Build a Request
```go
// GET request (most common)
req := H.MakeGetRequest("https://api.example.com/users/1")
// Arbitrary method + body
req := H.MakeRequest("POST", "https://api.example.com/users", bodyReader)
```
### 3. Execute and Parse
```go
import (
"context"
H "github.com/IBM/fp-go/v2/context/readerioresult/http"
)
type User struct {
ID int `json:"id"`
Name string `json:"name"`
}
client := H.MakeClient(HTTP.DefaultClient)
// ReadJSON validates status, Content-Type, then unmarshals JSON
result := H.ReadJSON[User](client)(H.MakeGetRequest("https://api.example.com/users/1"))
// Execute — provide context once
user, err := result(context.Background())()
```
## Response Readers
All accept a `Client` and return a function `Requester → ReaderIOResult[A]`:
| Function | Returns | Notes |
|----------|---------|-------|
| `ReadJSON[A](client)` | `ReaderIOResult[A]` | Validates status + Content-Type, unmarshals JSON |
| `ReadText(client)` | `ReaderIOResult[string]` | Validates status, reads body as UTF-8 string |
| `ReadAll(client)` | `ReaderIOResult[[]byte]` | Validates status, returns raw body bytes |
| `ReadFullResponse(client)` | `ReaderIOResult[FullResponse]` | Returns `Pair[*http.Response, []byte]` |
`FullResponse = Pair[*http.Response, []byte]` — use `pair.First` / `pair.Second` to access components.
## Composing Requests in Pipelines
```go
import (
F "github.com/IBM/fp-go/v2/function"
H "github.com/IBM/fp-go/v2/context/readerioresult/http"
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
IO "github.com/IBM/fp-go/v2/io"
)
client := H.MakeClient(HTTP.DefaultClient)
readPost := H.ReadJSON[Post](client)
pipeline := F.Pipe2(
H.MakeGetRequest("https://jsonplaceholder.typicode.com/posts/1"),
readPost,
RIO.ChainFirstIOK(IO.Logf[Post]("Got post: %v")),
)
post, err := pipeline(context.Background())()
```
## Parallel Requests — Homogeneous Types
Use `RIO.TraverseArray` when all requests return the same type:
```go
import (
A "github.com/IBM/fp-go/v2/array"
F "github.com/IBM/fp-go/v2/function"
H "github.com/IBM/fp-go/v2/context/readerioresult/http"
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
IO "github.com/IBM/fp-go/v2/io"
)
type PostItem struct {
UserID uint `json:"userId"`
ID uint `json:"id"`
Title string `json:"title"`
}
client := H.MakeClient(HTTP.DefaultClient)
readPost := H.ReadJSON[PostItem](client)
// Fetch 10 posts in parallel
data := F.Pipe3(
A.MakeBy(10, func(i int) string {
return fmt.Sprintf("https://jsonplaceholder.typicode.com/posts/%d", i+1)
}),
RIO.TraverseArray(F.Flow3(
H.MakeGetRequest,
readPost,
RIO.ChainFirstIOK(IO.Logf[PostItem]("Post: %v")),
)),
RIO.ChainFirstIOK(IO.Logf[[]PostItem]("All posts: %v")),
RIO.Map(A.Size[PostItem]),
)
count, err := data(context.Background())()
```
## Parallel Requests — Heterogeneous Types
Use `RIO.TraverseTuple2` (or `Tuple3`, etc.) when requests return different types:
```go
import (
T "github.com/IBM/fp-go/v2/tuple"
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
H "github.com/IBM/fp-go/v2/context/readerioresult/http"
F "github.com/IBM/fp-go/v2/function"
)
type CatFact struct {
Fact string `json:"fact"`
}
client := H.MakeClient(HTTP.DefaultClient)
readPost := H.ReadJSON[PostItem](client)
readCatFact := H.ReadJSON[CatFact](client)
// Execute both requests in parallel with different response types
data := F.Pipe3(
T.MakeTuple2(
"https://jsonplaceholder.typicode.com/posts/1",
"https://catfact.ninja/fact",
),
T.Map2(H.MakeGetRequest, H.MakeGetRequest), // build both requesters
RIO.TraverseTuple2(readPost, readCatFact), // run in parallel, typed
RIO.ChainFirstIOK(IO.Logf[T.Tuple2[PostItem, CatFact]]("Result: %v")),
)
both, err := data(context.Background())()
// both.F1 is PostItem, both.F2 is CatFact
```
## Building Requests with the Builder API
For complex requests (custom headers, query params, JSON body), use the builder:
```go
import (
B "github.com/IBM/fp-go/v2/http/builder"
RB "github.com/IBM/fp-go/v2/context/readerioresult/http/builder"
F "github.com/IBM/fp-go/v2/function"
)
// GET with query parameters
req := F.Pipe2(
B.Default,
B.WithURL("https://api.example.com/items?page=1"),
B.WithQueryArg("limit")("50"),
)
requester := RB.Requester(req)
// POST with JSON body
req := F.Pipe3(
B.Default,
B.WithURL("https://api.example.com/users"),
B.WithMethod("POST"),
B.WithJSON(map[string]string{"name": "Alice"}),
// sets Content-Type: application/json automatically
)
requester := RB.Requester(req)
// With authentication and custom headers
req := F.Pipe3(
B.Default,
B.WithURL("https://api.example.com/protected"),
B.WithBearer("my-token"), // sets Authorization: Bearer my-token
B.WithHeader("X-Request-ID")("123"),
)
requester := RB.Requester(req)
// Execute
result := H.ReadJSON[Response](client)(requester)
data, err := result(ctx)()
```
### Builder Functions
| Function | Effect |
|----------|--------|
| `B.WithURL(url)` | Set the target URL |
| `B.WithMethod(method)` | Set HTTP method (GET, POST, PUT, DELETE, …) |
| `B.WithJSON(v)` | Marshal `v` as JSON body, set `Content-Type: application/json` |
| `B.WithBytes(data)` | Set raw bytes body, set `Content-Length` automatically |
| `B.WithHeader(key)(value)` | Add a request header |
| `B.WithBearer(token)` | Set `Authorization: Bearer <token>` |
| `B.WithQueryArg(key)(value)` | Append a query parameter |
## Error Handling
Errors from request creation, HTTP status codes, Content-Type validation, and JSON parsing all propagate automatically through the `Result` monad. You only handle errors at the call site:
```go
// Pattern 1: direct extraction
value, err := pipeline(ctx)()
if err != nil { /* handle */ }
// Pattern 2: Fold for clean HTTP handler
RIO.Fold(
func(err error) { http.Error(w, err.Error(), http.StatusInternalServerError) },
func(data MyType) { json.NewEncoder(w).Encode(data) },
)(pipeline)(ctx)()
```
## Full HTTP Handler Example
```go
package main
import (
"context"
"encoding/json"
"net/http"
HTTP "net/http"
"fmt"
F "github.com/IBM/fp-go/v2/function"
H "github.com/IBM/fp-go/v2/context/readerioresult/http"
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
IO "github.com/IBM/fp-go/v2/io"
)
type Post struct {
ID int `json:"id"`
Title string `json:"title"`
}
var client = H.MakeClient(HTTP.DefaultClient)
func fetchPost(id int) RIO.ReaderIOResult[Post] {
url := fmt.Sprintf("https://jsonplaceholder.typicode.com/posts/%d", id)
return F.Pipe2(
H.MakeGetRequest(url),
H.ReadJSON[Post](client),
RIO.ChainFirstIOK(IO.Logf[Post]("fetched: %v")),
)
}
func handler(w http.ResponseWriter, r *http.Request) {
RIO.Fold(
func(err error) {
http.Error(w, err.Error(), http.StatusBadGateway)
},
func(post Post) {
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(post)
},
)(fetchPost(1))(r.Context())()
}
```
## Import Reference
```go
import (
HTTP "net/http"
H "github.com/IBM/fp-go/v2/context/readerioresult/http"
RB "github.com/IBM/fp-go/v2/context/readerioresult/http/builder"
B "github.com/IBM/fp-go/v2/http/builder"
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
F "github.com/IBM/fp-go/v2/function"
A "github.com/IBM/fp-go/v2/array"
T "github.com/IBM/fp-go/v2/tuple"
IO "github.com/IBM/fp-go/v2/io"
)
```
Requires Go 1.24+.

View File

@@ -0,0 +1,410 @@
# fp-go Logging
## Overview
fp-go provides logging utilities that integrate naturally with functional pipelines. Logging is always a **side effect** — it should not change the value being processed. The library achieves this through `ChainFirst`-style combinators that thread the original value through unchanged while performing the log.
## Packages
| Package | Purpose |
|---------|---------|
| `github.com/IBM/fp-go/v2/logging` | Global logger, context-embedded logger, `LoggingCallbacks` |
| `github.com/IBM/fp-go/v2/io` | `Logf`, `Logger`, `LogGo`, `Printf`, `PrintGo` — IO-level logging helpers |
| `github.com/IBM/fp-go/v2/readerio` | `SLog`, `SLogWithCallback` — structured logging for ReaderIO |
| `github.com/IBM/fp-go/v2/context/readerio` | `SLog`, `SLogWithCallback` — structured logging for context ReaderIO |
| `github.com/IBM/fp-go/v2/context/readerresult` | `SLog`, `TapSLog`, `SLogWithCallback` — structured logging for ReaderResult |
| `github.com/IBM/fp-go/v2/context/readerioresult` | `SLog`, `TapSLog`, `SLogWithCallback`, `LogEntryExit`, `LogEntryExitWithCallback` — full suite for ReaderIOResult |
## Logging Inside Pipelines
The idiomatic way to log inside a monadic pipeline is `ChainFirstIOK` (or `ChainFirst` where the monad is already IO). These combinators execute a side-effecting function and pass the **original value** downstream unchanged.
### With `IOResult` / `ReaderIOResult` — printf-style
```go
import (
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
IO "github.com/IBM/fp-go/v2/io"
F "github.com/IBM/fp-go/v2/function"
)
pipeline := F.Pipe3(
fetchUser(42),
RIO.ChainEitherK(validateUser),
// Log after validation — value flows through unchanged
RIO.ChainFirstIOK(IO.Logf[User]("Validated user: %v")),
RIO.Map(enrichUser),
)
```
`IO.Logf[A](format string) func(A) IO[A]` logs using `log.Printf` and returns the value unchanged. It's a Kleisli arrow suitable for `ChainFirst` and `ChainFirstIOK`.
### With `IOEither` / plain `IO`
```go
import (
IOE "github.com/IBM/fp-go/v2/ioeither"
IO "github.com/IBM/fp-go/v2/io"
F "github.com/IBM/fp-go/v2/function"
)
pipeline := F.Pipe3(
file.ReadFile("config.json"),
IOE.ChainEitherK(J.Unmarshal[Config]),
IOE.ChainFirstIOK(IO.Logf[Config]("Loaded config: %v")),
IOE.Map[error](processConfig),
)
```
### Logging Arrays in TraverseArray
```go
import (
A "github.com/IBM/fp-go/v2/array"
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
IO "github.com/IBM/fp-go/v2/io"
F "github.com/IBM/fp-go/v2/function"
)
// Log each item individually, then log the final slice
pipeline := F.Pipe2(
A.MakeBy(3, idxToFilename),
RIO.TraverseArray(F.Flow3(
file.ReadFile,
RIO.ChainEitherK(J.Unmarshal[Record]),
RIO.ChainFirstIOK(IO.Logf[Record]("Parsed record: %v")),
)),
RIO.ChainFirstIOK(IO.Logf[[]Record]("All records: %v")),
)
```
## IO Logging Functions
All live in `github.com/IBM/fp-go/v2/io`:
### `Logf` — printf-style
```go
IO.Logf[A any](format string) func(A) IO[A]
```
Uses `log.Printf`. The format string works like `fmt.Sprintf`.
```go
IO.Logf[User]("Processing user: %+v")
IO.Logf[int]("Count: %d")
```
### `Logger` — with custom `*log.Logger`
```go
IO.Logger[A any](loggers ...*log.Logger) func(prefix string) func(A) IO[A]
```
Uses `logger.Printf(prefix+": %v", value)`. Pass your own `*log.Logger` instance.
```go
customLog := log.New(os.Stderr, "APP ", log.LstdFlags)
logUser := IO.Logger[User](customLog)("user")
// logs: "APP user: {ID:42 Name:Alice}"
```
### `LogGo` — Go template syntax
```go
IO.LogGo[A any](tmpl string) func(A) IO[A]
```
Uses Go's `text/template`. The template receives the value as `.`.
```go
type User struct{ Name string; Age int }
IO.LogGo[User]("User {{.Name}} is {{.Age}} years old")
```
### `Printf` / `PrintGo` — stdout instead of log
Same signatures as `Logf` / `LogGo` but use `fmt.Printf`/`fmt.Println` (no log prefix, no timestamp).
```go
IO.Printf[Result]("Result: %v\n")
IO.PrintGo[User]("Name: {{.Name}}")
```
## Structured Logging in the `context` Package
The `context/readerioresult`, `context/readerresult`, and `context/readerio` packages provide structured `slog`-based logging functions that are context-aware: they retrieve the logger from the context (via `logging.GetLoggerFromContext`) rather than using a fixed logger instance.
### `TapSLog` — inline structured logging in a ReaderIOResult pipeline
`TapSLog` is an **Operator** (`func(ReaderIOResult[A]) ReaderIOResult[A]`). It sits directly in a `F.Pipe` call on a `ReaderIOResult`, logs the current value or error using `slog`, and passes the result through unchanged.
```go
import (
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
F "github.com/IBM/fp-go/v2/function"
)
pipeline := F.Pipe4(
fetchOrder(orderID),
RIO.TapSLog[Order]("Order fetched"), // logs value=<Order> or error=<err>
RIO.Chain(validateOrder),
RIO.TapSLog[Order]("Order validated"),
RIO.Chain(processPayment),
)
result, err := pipeline(ctx)()
```
- Logs **both** success values (`value=<A>`) and errors (`error=<err>`) using `slog` structured attributes.
- Respects the logger level — if the logger is configured to discard Info-level logs, nothing is written.
- Available in both `context/readerioresult` and `context/readerresult`.
### `SLog` — Kleisli-style structured logging
`SLog` is a **Kleisli arrow** (`func(Result[A]) ReaderResult[A]` / `func(Result[A]) ReaderIOResult[A]`). It is used with `Chain` when you want to intercept the raw `Result` directly.
```go
import (
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
F "github.com/IBM/fp-go/v2/function"
)
pipeline := F.Pipe4(
fetchData(id),
RIO.Chain(RIO.SLog[Data]("Data fetched")), // log raw Result, pass it through
RIO.Chain(validateData),
RIO.Chain(RIO.SLog[Data]("Data validated")),
RIO.Chain(processData),
)
```
**Difference from `TapSLog`:**
- `TapSLog[A](msg)` is an `Operator[A, A]` — used directly in `F.Pipe` on a `ReaderIOResult[A]`.
- `SLog[A](msg)` is a `Kleisli[Result[A], A]` — used with `Chain`, giving access to the raw `Result[A]`.
Both log in the same format. `TapSLog` is more ergonomic in most pipelines.
### `SLogWithCallback` — custom log level and logger source
```go
import (
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
"log/slog"
)
// Log at DEBUG level with a custom logger extracted from context
debugLog := RIO.SLogWithCallback[User](
slog.LevelDebug,
logging.GetLoggerFromContext, // or any func(context.Context) *slog.Logger
"Fetched user",
)
pipeline := F.Pipe2(
fetchUser(123),
RIO.Chain(debugLog),
RIO.Map(func(u User) string { return u.Name }),
)
```
### `LogEntryExit` — automatic entry/exit timing with correlation IDs
`LogEntryExit` wraps a `ReaderIOResult` computation with structured entry and exit log messages. It assigns a unique **correlation ID** (`ID=<n>`) to each invocation so concurrent or nested operations can be correlated in logs.
```go
import (
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
F "github.com/IBM/fp-go/v2/function"
)
pipeline := F.Pipe2(
fetchUser(123),
RIO.LogEntryExit[User]("fetchUser"), // wraps the operation
RIO.Chain(func(user User) RIO.ReaderIOResult[[]Order] {
return F.Pipe1(
fetchOrders(user.ID),
RIO.LogEntryExit[[]Order]("fetchOrders"),
)
}),
)
result, err := pipeline(ctx)()
// Logs:
// level=INFO msg="[entering]" name=fetchUser ID=1
// level=INFO msg="[exiting ]" name=fetchUser ID=1 duration=42ms
// level=INFO msg="[entering]" name=fetchOrders ID=2
// level=INFO msg="[exiting ]" name=fetchOrders ID=2 duration=18ms
```
On error, the exit log changes to `[throwing]` and includes the error:
```
level=INFO msg="[throwing]" name=fetchUser ID=3 duration=5ms error="user not found"
```
Key properties:
- **Correlation ID** (`ID=`) is unique per operation, monotonically increasing, and stored in the context so nested operations can access the parent's ID.
- **Duration** (`duration=`) is measured from entry to exit.
- **Logger is taken from the context** — embed a request-scoped logger with `logging.WithLogger` before executing the pipeline and `LogEntryExit` picks it up automatically.
- **Level-aware** — if the logger does not have the log level enabled, the entire entry/exit instrumentation is skipped (zero overhead).
- The original `ReaderIOResult[A]` value flows through **unchanged**.
```go
// Use a context logger so all log messages carry request metadata
cancelFn, ctxWithLogger := pair.Unpack(
logging.WithLogger(
slog.Default().With("requestID", r.Header.Get("X-Request-ID")),
)(r.Context()),
)
defer cancelFn()
result, err := pipeline(ctxWithLogger)()
```
### `LogEntryExitWithCallback` — custom log level
```go
import (
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
"log/slog"
)
// Log at DEBUG level instead of INFO
debugPipeline := F.Pipe1(
expensiveComputation(),
RIO.LogEntryExitWithCallback[Result](
slog.LevelDebug,
logging.GetLoggerFromContext,
"expensiveComputation",
),
)
```
### `SLog` / `SLogWithCallback` in `context/readerresult`
The same `SLog` and `TapSLog` functions are also available in `context/readerresult` for use with the synchronous `ReaderResult[A] = func(context.Context) (A, error)` monad:
```go
import RR "github.com/IBM/fp-go/v2/context/readerresult"
pipeline := F.Pipe3(
queryDB(id),
RR.TapSLog[Row]("Row fetched"),
RR.Chain(parseRow),
RR.TapSLog[Record]("Record parsed"),
)
```
## Global Logger (`logging` package)
The `logging` package manages a global `*slog.Logger` (structured logging, Go 1.21+).
```go
import "github.com/IBM/fp-go/v2/logging"
// Get the current global logger (defaults to slog.Default())
logger := logging.GetLogger()
logger.Info("application started", "version", "1.0")
// Replace the global logger; returns the old one for deferred restore
old := logging.SetLogger(slog.New(slog.NewJSONHandler(os.Stdout, nil)))
defer logging.SetLogger(old)
```
## Context-Embedded Logger
Embed a `*slog.Logger` in a `context.Context` to carry request-scoped loggers across the call stack. All context-package logging functions (`TapSLog`, `SLog`, `LogEntryExit`) pick up this logger automatically.
```go
import (
"github.com/IBM/fp-go/v2/logging"
"github.com/IBM/fp-go/v2/pair"
"log/slog"
)
// Create a request-scoped logger
reqLogger := slog.Default().With("requestID", "abc-123")
// Embed it into a context using the Kleisli arrow WithLogger
cancelFn, ctxWithLogger := pair.Unpack(logging.WithLogger(reqLogger)(ctx))
defer cancelFn()
// All downstream logging (TapSLog, LogEntryExit, etc.) uses reqLogger
result, err := pipeline(ctxWithLogger)()
```
`WithLogger` returns a `ContextCancel = Pair[context.CancelFunc, context.Context]`. The cancel function is a no-op — the context is only enriched, not made cancellable.
`GetLoggerFromContext` falls back to the global logger if no logger is found in the context.
## `LoggingCallbacks` — Dual-Logger Pattern
```go
import "github.com/IBM/fp-go/v2/logging"
// Returns (infoCallback, errorCallback) — both are func(string, ...any)
infoLog, errLog := logging.LoggingCallbacks() // use log.Default() for both
infoLog, errLog := logging.LoggingCallbacks(myLogger) // same logger for both
infoLog, errLog := logging.LoggingCallbacks(infoLogger, errorLogger) // separate loggers
```
Used internally by `io.Logger` and by packages that need separate info/error sinks.
## Choosing the Right Logging Function
| Situation | Use |
|-----------|-----|
| Quick printf logging mid-pipeline | `IO.Logf[A]("fmt")` with `ChainFirstIOK` |
| Go template formatting mid-pipeline | `IO.LogGo[A]("tmpl")` with `ChainFirstIOK` |
| Print to stdout (no log prefix) | `IO.Printf[A]("fmt")` with `ChainFirstIOK` |
| Structured slog — log value or error inline | `RIO.TapSLog[A]("msg")` (Operator, used in Pipe) |
| Structured slog — intercept raw Result | `RIO.Chain(RIO.SLog[A]("msg"))` (Kleisli) |
| Structured slog — custom log level | `RIO.SLogWithCallback[A](level, cb, "msg")` |
| Entry/exit timing + correlation IDs | `RIO.LogEntryExit[A]("name")` |
| Entry/exit at custom log level | `RIO.LogEntryExitWithCallback[A](level, cb, "name")` |
| Structured logging globally | `logging.GetLogger()` / `logging.SetLogger()` |
| Request-scoped logger in context | `logging.WithLogger(logger)` + `logging.GetLoggerFromContext(ctx)` |
| Custom `*log.Logger` in pipeline | `IO.Logger[A](logger)("prefix")` with `ChainFirstIOK` |
## Complete Example
```go
package main
import (
"context"
"log/slog"
"os"
F "github.com/IBM/fp-go/v2/function"
IO "github.com/IBM/fp-go/v2/io"
L "github.com/IBM/fp-go/v2/logging"
P "github.com/IBM/fp-go/v2/pair"
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
)
func main() {
// Configure JSON structured logging globally
L.SetLogger(slog.New(slog.NewJSONHandler(os.Stdout, nil)))
// Embed a request-scoped logger into the context
_, ctx := P.Unpack(L.WithLogger(
L.GetLogger().With("requestID", "req-001"),
)(context.Background()))
pipeline := F.Pipe5(
fetchData(42),
RIO.LogEntryExit[Data]("fetchData"), // entry/exit with timing + ID
RIO.TapSLog[Data]("raw data"), // inline structured value log
RIO.ChainEitherK(transformData),
RIO.LogEntryExit[Result]("transformData"),
RIO.ChainFirstIOK(IO.LogGo[Result]("result: {{.Value}}")), // template log
)
value, err := pipeline(ctx)()
if err != nil {
L.GetLogger().Error("pipeline failed", "error", err)
}
_ = value
}
```

View File

@@ -0,0 +1,520 @@
# fp-go Monadic Operations
## Overview
`fp-go` (import path `github.com/IBM/fp-go/v2`) brings type-safe functional programming to Go using generics. Every monad follows a **consistent interface**: once you know the pattern in one monad, it transfers to all others.
All functions use the **data-last** principle: the data being transformed is always the last argument, enabling partial application and pipeline composition.
## Core Types
| Type | Package | Represents |
|------|---------|------------|
| `Option[A]` | `option` | A value that may or may not be present (replaces nil) |
| `Either[E, A]` | `either` | A value that is either a left error `E` or a right success `A` |
| `Result[A]` | `result` | `Either[error, A]` — shorthand for the common case |
| `IO[A]` | `io` | A lazy computation that produces `A` (possibly with side effects) |
| `IOResult[A]` | `ioresult` | `IO[Result[A]]` — lazy computation that can fail |
| `ReaderIOResult[A]` | `context/readerioresult` | `func(context.Context) IOResult[A]` — context-aware IO with errors |
| `Effect[C, A]` | `effect` | `func(C) ReaderIOResult[A]` — typed dependency injection + IO + errors |
Idiomatic (high-performance, tuple-based) equivalents live in `idiomatic/`:
- `idiomatic/option` → `(A, bool)` tuples
- `idiomatic/result` → `(A, error)` tuples
- `idiomatic/ioresult` → `func() (A, error)`
- `idiomatic/context/readerresult` → `func(context.Context) (A, error)`
## Standard Operations
Every monad exports these operations (PascalCase for exported Go names):
| fp-go | fp-ts / Haskell | Description |
|-------|----------------|-------------|
| `Of` | `of` / `pure` | Lift a pure value into the monad |
| `Map` | `map` / `fmap` | Transform the value inside without changing the context |
| `Chain` | `chain` / `>>=` | Sequence a computation that itself returns a monadic value |
| `Ap` | `ap` / `<*>` | Apply a wrapped function to a wrapped value |
| `Fold` | `fold` / `either` | Eliminate the context — handle every case and extract a plain value |
| `GetOrElse` | `getOrElse` / `fromMaybe` | Extract the value or use a default (Option/Result) |
| `Filter` | `filter` / `mfilter` | Keep only values satisfying a predicate |
| `Flatten` | `flatten` / `join` | Remove one level of nesting (`M[M[A]]` → `M[A]`) |
| `ChainFirst` | `chainFirst` / `>>` | Sequence for side effects; keeps the original value |
| `Alt` | `alt` / `<\|>` | Provide an alternative when the first computation fails |
| `FromPredicate` | `fromPredicate` / `guard` | Build a monadic value from a predicate |
| `Sequence` | `sequence` | Turn `[]M[A]` into `M[[]A]` |
| `Traverse` | `traverse` | Map and sequence in one step |
Curried (composable) vs. monadic (direct) form:
```go
// Curried — data last, returns a transformer function
option.Map(strings.ToUpper) // func(Option[string]) Option[string]
// Monadic — data first, immediate execution
option.MonadMap(option.Some("hello"), strings.ToUpper)
```
Use curried form for pipelines; use `Monad*` form when you already have all arguments.
## Key Type Aliases (defined per monad)
```go
// A Kleisli arrow: a function from A to a monadic B
type Kleisli[A, B any] = func(A) M[B]
// An operator: transforms one monadic value into another
type Operator[A, B any] = func(M[A]) M[B]
```
`Chain` takes a `Kleisli`, `Map` returns an `Operator`. The naming is consistent across all monads.
## Examples
### Option — nullable values without nil
```go
import (
O "github.com/IBM/fp-go/v2/option"
F "github.com/IBM/fp-go/v2/function"
"strconv"
)
parseAndDouble := F.Flow2(
O.FromPredicate(func(s string) bool { return s != "" }),
O.Chain(func(s string) O.Option[int] {
n, err := strconv.Atoi(s)
if err != nil {
return O.None[int]()
}
return O.Some(n * 2)
}),
)
parseAndDouble("21") // Some(42)
parseAndDouble("") // None
parseAndDouble("abc") // None
```
### Result — error handling without if-err boilerplate
```go
import (
R "github.com/IBM/fp-go/v2/result"
F "github.com/IBM/fp-go/v2/function"
"strconv"
"errors"
)
parse := R.Eitherize1(strconv.Atoi) // lifts func(string) (int, error) → func(string) Result[int]
validate := func(n int) R.Result[int] {
if n < 0 {
return R.Error[int](errors.New("must be non-negative"))
}
return R.Of(n)
}
pipeline := F.Flow2(parse, R.Chain(validate))
pipeline("42") // Ok(42)
pipeline("-1") // Error("must be non-negative")
pipeline("abc") // Error(strconv parse error)
```
### IOResult — lazy IO with error handling
```go
import (
IOE "github.com/IBM/fp-go/v2/ioresult"
F "github.com/IBM/fp-go/v2/function"
J "github.com/IBM/fp-go/v2/json"
"os"
)
readConfig := F.Flow2(
IOE.Eitherize1(os.ReadFile), // func(string) IOResult[[]byte]
IOE.ChainEitherK(J.Unmarshal[Config]), // parse JSON, propagate errors
)
result := readConfig("config.json")() // execute lazily
```
### ReaderIOResult — context-aware pipelines (recommended for services)
```go
import (
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
F "github.com/IBM/fp-go/v2/function"
"context"
)
// type ReaderIOResult[A any] = func(context.Context) func() result.Result[A]
fetchUser := func(id int) RIO.ReaderIOResult[User] {
return func(ctx context.Context) func() result.Result[User] {
return func() result.Result[User] {
// perform IO here
}
}
}
pipeline := F.Pipe3(
fetchUser(42),
RIO.ChainEitherK(validateUser), // lift pure (User, error) function
RIO.Map(enrichUser), // lift pure User → User function
RIO.ChainFirstIOK(IO.Logf[User]("Fetched: %v")), // side-effect logging
)
user, err := pipeline(ctx)() // provide context once, execute
```
### Traversal — process slices monadically
```go
import (
A "github.com/IBM/fp-go/v2/array"
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
F "github.com/IBM/fp-go/v2/function"
)
// Fetch all users, stop on first error
fetchAll := F.Pipe1(
A.MakeBy(10, userID),
RIO.TraverseArray(fetchUser), // []ReaderIOResult[User] → ReaderIOResult[[]User]
)
```
## Function Composition with Flow and Pipe
```go
import F "github.com/IBM/fp-go/v2/function"
// Flow: compose functions left-to-right, returns a new function
transform := F.Flow3(
option.Map(strings.TrimSpace),
option.Filter(func(s string) bool { return s != "" }),
option.GetOrElse(func() string { return "default" }),
)
result := transform(option.Some(" hello ")) // "hello"
// Pipe: apply a value through a pipeline immediately
result := F.Pipe3(
option.Some(" hello "),
option.Map(strings.TrimSpace),
option.Filter(func(s string) bool { return s != "" }),
option.GetOrElse(func() string { return "default" }),
)
```
## Lifting Pure Functions into Monadic Context
fp-go provides helpers to promote non-monadic functions:
| Helper | Lifts |
|--------|-------|
| `ChainEitherK` | `func(A) (B, error)` → works inside the monad |
| `ChainOptionK` | `func(A) Option[B]` → works inside the monad |
| `ChainFirstIOK` | `func(A) IO[B]` for side effects, keeps original value |
| `Eitherize1..N` | `func(A) (B, error)` → `func(A) Result[B]` |
| `FromPredicate` | `func(A) bool` + error builder → `func(A) Result[A]` |
## Type Parameter Ordering Rule (V2)
Non-inferrable type parameters come **first**, so the compiler can infer the rest:
```go
// B cannot be inferred from the argument — it comes first
result := either.Ap[string](value)(funcInEither)
// All types inferrable — no explicit params needed
result := either.Map(transform)(value)
result := either.Chain(validator)(value)
```
## When to Use Which Monad
| Situation | Use |
|-----------|-----|
| Value that might be absent | `Option[A]` |
| Operation that can fail with custom error type | `Either[E, A]` |
| Operation that can fail with `error` | `Result[A]` |
| Lazy IO, side effects | `IO[A]` |
| IO that can fail | `IOResult[A]` |
| IO + context (cancellation, deadlines) | `ReaderIOResult[A]` from `context/readerioresult` |
| IO + context + typed dependencies | `Effect[C, A]` |
| High-performance services | Idiomatic packages in `idiomatic/` |
## Do-Notation: Accumulating State with `Bind` and `ApS`
When a pipeline needs to carry **multiple intermediate results** forward — not just a single value — the `Chain`/`Map` style becomes unwieldy because each step only threads one value and prior results are lost. Do-notation solves this by accumulating results into a growing struct (the "state") at each step.
Every monad that supports do-notation exports the same family of functions. The examples below use `context/readerioresult` (`RIO`), but the identical API is available in `result`, `option`, `ioresult`, `readerioresult`, and others.
### The Function Family
| Function | Kind | What it does |
|----------|------|-------------|
| `Do(empty S)` | — | Lift an empty struct into the monad; starting point |
| `BindTo(setter)` | monadic | Convert an existing `M[T]` into `M[S]`; alternative start |
| `Bind(setter, f)` | monadic | Add a result; `f` receives the **current state** and returns `M[T]` |
| `ApS(setter, fa)` | applicative | Add a result; `fa` is **independent** of the current state |
| `Let(setter, f)` | pure | Add a value computed by a **pure function** of the state |
| `LetTo(setter, value)` | pure | Add a **constant** value |
Lens variants (`BindL`, `ApSL`, `LetL`, `LetToL`) accept a `Lens[S, T]` instead of a manual setter, integrating naturally with the optics system.
### `Bind` — Sequential, Dependent Steps
`Bind` sequences two monadic computations. The function `f` receives the **full accumulated state** so it can read anything gathered so far. Errors short-circuit automatically.
```go
import (
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
F "github.com/IBM/fp-go/v2/function"
L "github.com/IBM/fp-go/v2/optics/lens"
"context"
)
type Pipeline struct {
User User
Config Config
Posts []Post
}
// Lenses — focus on individual fields; .Set is already func(T) func(S) S
var (
userLens = L.MakeLens(func(s Pipeline) User { return s.User }, func(s Pipeline, u User) Pipeline { s.User = u; return s })
configLens = L.MakeLens(func(s Pipeline) Config { return s.Config }, func(s Pipeline, c Config) Pipeline { s.Config = c; return s })
postsLens = L.MakeLens(func(s Pipeline) []Post { return s.Posts }, func(s Pipeline, p []Post) Pipeline { s.Posts = p; return s })
)
result := F.Pipe3(
RIO.Do(Pipeline{}), // lift empty struct
RIO.Bind(userLens.Set, func(_ Pipeline) RIO.ReaderIOResult[User] { return fetchUser(42) }),
RIO.Bind(configLens.Set, F.Flow2(userLens.Get, fetchConfigForUser)), // read s.User, pass to fetcher
RIO.Bind(postsLens.Set, F.Flow2(userLens.Get, fetchPostsForUser)), // read s.User, pass to fetcher
)
pipeline, err := result(context.Background())()
// pipeline.User, pipeline.Config, pipeline.Posts are all populated
```
The setter signature is `func(T) func(S1) S2` — it takes the new value and returns a state transformer. `lens.Set` already has this shape, so no manual setter functions are needed. `F.Flow2(lens.Get, f)` composes the field getter with any Kleisli arrow `f` point-free.
### `ApS` — Independent, Applicative Steps
`ApS` uses **applicative** semantics: `fa` is evaluated without any access to the current state. Use it when steps have no dependency on each other — the library can choose to execute them concurrently.
```go
import (
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
F "github.com/IBM/fp-go/v2/function"
L "github.com/IBM/fp-go/v2/optics/lens"
)
type Summary struct {
User User
Weather Weather
}
var (
userLens = L.MakeLens(func(s Summary) User { return s.User }, func(s Summary, u User) Summary { s.User = u; return s })
weatherLens = L.MakeLens(func(s Summary) Weather { return s.Weather }, func(s Summary, w Weather) Summary { s.Weather = w; return s })
)
// Both are independent — neither needs the other's result
result := F.Pipe2(
RIO.Do(Summary{}),
RIO.ApS(userLens.Set, fetchUser(42)),
RIO.ApS(weatherLens.Set, fetchWeather("NYC")),
)
```
**Key difference from `Bind`:**
| | `Bind(setter, f)` | `ApS(setter, fa)` |
|-|---|---|
| Second argument | `func(S1) M[T]` — a **function** of state | `M[T]` — a **fixed** monadic value |
| Can read prior state? | Yes — receives `S1` | No — no access to state |
| Semantics | Monadic (sequential) | Applicative (independent) |
### `Let` and `LetTo` — Pure Additions
`Let` adds a value computed by a **pure function** of the current state (no monad, cannot fail):
```go
import (
RIO "github.com/IBM/fp-go/v2/context/readerioresult"
F "github.com/IBM/fp-go/v2/function"
L "github.com/IBM/fp-go/v2/optics/lens"
)
type Enriched struct {
User User
FullName string
}
var (
userLens = L.MakeLens(func(s Enriched) User { return s.User }, func(s Enriched, u User) Enriched { s.User = u; return s })
fullNameLens = L.MakeLens(func(s Enriched) string { return s.FullName }, func(s Enriched, n string) Enriched { s.FullName = n; return s })
)
fullName := func(u User) string { return u.FirstName + " " + u.LastName }
result := F.Pipe2(
RIO.Do(Enriched{}),
RIO.Bind(userLens.Set, func(_ Enriched) RIO.ReaderIOResult[User] { return fetchUser(42) }),
RIO.Let(fullNameLens.Set, F.Flow2(userLens.Get, fullName)), // read s.User, compute pure string
)
```
`LetTo` adds a **constant** with no computation:
```go
RIO.LetTo(setVersion, "v1.2.3")
```
### `BindTo` — Starting from an Existing Value
When you have an existing `M[T]` and want to project it into a state struct rather than starting from `Do(empty)`:
```go
type State struct{ User User }
result := F.Pipe1(
fetchUser(42), // ReaderIOResult[User]
    RIO.BindTo(func(u User) State { return State{User: u} }), // ReaderIOResult[State]
)
```
### Lens Variants (`ApSL`, `BindL`, `LetL`, `LetToL`)
If you have a `Lens[S, T]` (from the optics system or code generation), you can skip writing the setter function entirely:
```go
import (
RO "github.com/IBM/fp-go/v2/readeroption"
F "github.com/IBM/fp-go/v2/function"
)
// Lenses generated by go:generate (see optics/README.md)
// personLenses.Name : Lens[*Person, Name]
// personLenses.Age : Lens[*Person, Age]
makePerson := F.Pipe2(
RO.Do[*PartialPerson](emptyPerson),
RO.ApSL(personLenses.Name, maybeName), // replaces: ApS(personLenses.Name.Set, maybeName)
RO.ApSL(personLenses.Age, maybeAge),
)
```
This exact pattern is used in [`samples/builder`](samples/builder/builder.go) to validate and construct a `Person` from an unvalidated `PartialPerson`.
### Lifted Variants for Mixed Monads
`context/readerioresult` provides `Bind*K` helpers that lift simpler computations directly into the do-chain:
| Helper | Lifts |
|--------|-------|
| `BindResultK` / `BindEitherK` | `func(S1) (T, error)` — pure result |
| `BindIOResultK` / `BindIOEitherK` | `func(S1) func() (T, error)` — lazy IO result |
| `BindIOK` | `func(S1) func() T` — infallible IO |
| `BindReaderK` | `func(S1) func(ctx) T` — context reader |
```go
RIO.BindResultK(setUser, func(s Pipeline) (User, error) {
return validateAndBuild(s) // plain (value, error) function, no wrapping needed
})
```
### Decision Guide
```
Does the new step need to read prior accumulated state?
YES → Bind (monadic, sequential; f receives current S)
NO → ApS (applicative, independent; fa is a fixed M[T])
Is the new value derived purely from state, with no monad?
YES → Let (pure function of S)
Is the new value a compile-time or runtime constant?
YES → LetTo
Starting from an existing M[T] rather than an empty struct?
YES → BindTo
```
### Complete Example — `result` Monad
The same pattern works with simpler monads. Here with `result.Result[A]`:
`Eitherize1` converts any standard `func(A) (B, error)` into `func(A) Result[B]`. Define these lifted functions once as variables. Then use lenses to focus on individual struct fields and compose with `F.Flow2(lens.Get, f)` — no inline lambdas, no manual error handling.
```go
import (
R "github.com/IBM/fp-go/v2/result"
F "github.com/IBM/fp-go/v2/function"
L "github.com/IBM/fp-go/v2/optics/lens"
N "github.com/IBM/fp-go/v2/number"
"strconv"
)
type Parsed struct {
Raw string
Number int
Double int
}
// Lenses — focus on individual fields of Parsed.
var (
rawLens = L.MakeLens(
func(s Parsed) string { return s.Raw },
func(s Parsed, v string) Parsed { s.Raw = v; return s },
)
numberLens = L.MakeLens(
func(s Parsed) int { return s.Number },
func(s Parsed, v int) Parsed { s.Number = v; return s },
)
doubleLens = L.MakeLens(
func(s Parsed) int { return s.Double },
func(s Parsed, v int) Parsed { s.Double = v; return s },
)
)
// Lifted functions — convert standard (value, error) functions into Result-returning ones.
var (
atoi = R.Eitherize1(strconv.Atoi) // func(string) Result[int]
)
parse := func(input string) R.Result[Parsed] {
return F.Pipe3(
R.Do(Parsed{}),
R.LetTo(rawLens.Set, input), // set Raw to constant input
R.Bind(numberLens.Set, F.Flow2(rawLens.Get, atoi)), // get Raw, parse → Result[int]
R.Let(doubleLens.Set, F.Flow2(numberLens.Get, N.Mul(2))), // get Number, multiply → int
)
}
parse("21") // Ok(Parsed{Raw:"21", Number:21, Double:42})
parse("abc") // Error(strconv parse error)
```
`rawLens.Set` is already `func(string) func(Parsed) Parsed`, matching the setter signature `Bind` and `LetTo` expect — no manual setter functions to write. `F.Flow2(rawLens.Get, atoi)` composes the field getter with the eitherized parse function into a `Kleisli[Parsed, int]` without any intermediate lambda.
## Import Paths
```go
import (
"github.com/IBM/fp-go/v2/option"
"github.com/IBM/fp-go/v2/result"
"github.com/IBM/fp-go/v2/either"
"github.com/IBM/fp-go/v2/io"
"github.com/IBM/fp-go/v2/ioresult"
"github.com/IBM/fp-go/v2/context/readerioresult"
"github.com/IBM/fp-go/v2/effect"
F "github.com/IBM/fp-go/v2/function"
A "github.com/IBM/fp-go/v2/array"
)
```
Requires Go 1.24+ (generic type aliases).

522
v2/array/array_nil_test.go Normal file
View File

@@ -0,0 +1,522 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package array
import (
"fmt"
"testing"
O "github.com/IBM/fp-go/v2/option"
P "github.com/IBM/fp-go/v2/pair"
S "github.com/IBM/fp-go/v2/string"
"github.com/stretchr/testify/assert"
)
// TestNilSlice_IsEmpty checks that IsEmpty reports true for a nil slice.
func TestNilSlice_IsEmpty(t *testing.T) {
	var xs []int
	assert.True(t, IsEmpty(xs), "nil slice should be empty")
}

// TestNilSlice_IsNonEmpty checks that IsNonEmpty reports false for a nil slice.
func TestNilSlice_IsNonEmpty(t *testing.T) {
	var xs []int
	assert.False(t, IsNonEmpty(xs), "nil slice should not be non-empty")
}
// TestNilSlice_MonadMap checks that mapping over a nil slice yields an empty, non-nil result.
func TestNilSlice_MonadMap(t *testing.T) {
	var xs []int
	got := MonadMap(xs, func(v int) string { return fmt.Sprintf("%d", v) })
	assert.NotNil(t, got, "MonadMap should return non-nil slice")
	assert.Len(t, got, 0, "MonadMap should return empty slice for nil input")
}

// TestNilSlice_MonadMapRef checks that mapping by reference over a nil slice yields an empty, non-nil result.
func TestNilSlice_MonadMapRef(t *testing.T) {
	var xs []int
	got := MonadMapRef(xs, func(v *int) string { return fmt.Sprintf("%d", *v) })
	assert.NotNil(t, got, "MonadMapRef should return non-nil slice")
	assert.Len(t, got, 0, "MonadMapRef should return empty slice for nil input")
}
// TestNilSlice_Map checks that the curried Map on a nil slice yields an empty, non-nil result.
func TestNilSlice_Map(t *testing.T) {
	var xs []int
	got := Map(func(v int) string { return fmt.Sprintf("%d", v) })(xs)
	assert.NotNil(t, got, "Map should return non-nil slice")
	assert.Len(t, got, 0, "Map should return empty slice for nil input")
}

// TestNilSlice_MapRef checks that the curried MapRef on a nil slice yields an empty, non-nil result.
func TestNilSlice_MapRef(t *testing.T) {
	var xs []int
	got := MapRef(func(v *int) string { return fmt.Sprintf("%d", *v) })(xs)
	assert.NotNil(t, got, "MapRef should return non-nil slice")
	assert.Len(t, got, 0, "MapRef should return empty slice for nil input")
}

// TestNilSlice_MapWithIndex checks that indexed mapping over a nil slice yields an empty, non-nil result.
func TestNilSlice_MapWithIndex(t *testing.T) {
	var xs []int
	got := MapWithIndex(func(i int, v int) string { return fmt.Sprintf("%d:%d", i, v) })(xs)
	assert.NotNil(t, got, "MapWithIndex should return non-nil slice")
	assert.Len(t, got, 0, "MapWithIndex should return empty slice for nil input")
}
// TestNilSlice_Filter checks that filtering a nil slice yields an empty, non-nil result.
func TestNilSlice_Filter(t *testing.T) {
	var xs []int
	got := Filter(func(v int) bool { return v > 0 })(xs)
	assert.NotNil(t, got, "Filter should return non-nil slice")
	assert.Len(t, got, 0, "Filter should return empty slice for nil input")
}

// TestNilSlice_FilterWithIndex checks that indexed filtering of a nil slice yields an empty, non-nil result.
func TestNilSlice_FilterWithIndex(t *testing.T) {
	var xs []int
	got := FilterWithIndex(func(i int, v int) bool { return v > 0 })(xs)
	assert.NotNil(t, got, "FilterWithIndex should return non-nil slice")
	assert.Len(t, got, 0, "FilterWithIndex should return empty slice for nil input")
}

// TestNilSlice_FilterRef checks that filtering a nil slice by reference yields an empty, non-nil result.
func TestNilSlice_FilterRef(t *testing.T) {
	var xs []int
	got := FilterRef(func(v *int) bool { return *v > 0 })(xs)
	assert.NotNil(t, got, "FilterRef should return non-nil slice")
	assert.Len(t, got, 0, "FilterRef should return empty slice for nil input")
}
// TestNilSlice_MonadFilterMap checks that filter-mapping a nil slice yields an empty, non-nil result.
func TestNilSlice_MonadFilterMap(t *testing.T) {
	var xs []int
	got := MonadFilterMap(xs, func(v int) O.Option[string] { return O.Some(fmt.Sprintf("%d", v)) })
	assert.NotNil(t, got, "MonadFilterMap should return non-nil slice")
	assert.Len(t, got, 0, "MonadFilterMap should return empty slice for nil input")
}

// TestNilSlice_MonadFilterMapWithIndex checks that indexed filter-mapping a nil slice yields an empty, non-nil result.
func TestNilSlice_MonadFilterMapWithIndex(t *testing.T) {
	var xs []int
	got := MonadFilterMapWithIndex(xs, func(i int, v int) O.Option[string] { return O.Some(fmt.Sprintf("%d:%d", i, v)) })
	assert.NotNil(t, got, "MonadFilterMapWithIndex should return non-nil slice")
	assert.Len(t, got, 0, "MonadFilterMapWithIndex should return empty slice for nil input")
}

// TestNilSlice_FilterMap checks that the curried FilterMap on a nil slice yields an empty, non-nil result.
func TestNilSlice_FilterMap(t *testing.T) {
	var xs []int
	got := FilterMap(func(v int) O.Option[string] { return O.Some(fmt.Sprintf("%d", v)) })(xs)
	assert.NotNil(t, got, "FilterMap should return non-nil slice")
	assert.Len(t, got, 0, "FilterMap should return empty slice for nil input")
}

// TestNilSlice_FilterMapWithIndex checks that the curried indexed FilterMap on a nil slice yields an empty, non-nil result.
func TestNilSlice_FilterMapWithIndex(t *testing.T) {
	var xs []int
	got := FilterMapWithIndex(func(i int, v int) O.Option[string] { return O.Some(fmt.Sprintf("%d:%d", i, v)) })(xs)
	assert.NotNil(t, got, "FilterMapWithIndex should return non-nil slice")
	assert.Len(t, got, 0, "FilterMapWithIndex should return empty slice for nil input")
}
// TestNilSlice_MonadReduce checks that reducing a nil slice returns the seed unchanged.
func TestNilSlice_MonadReduce(t *testing.T) {
	var xs []int
	got := MonadReduce(xs, func(acc int, v int) int { return acc + v }, 10)
	assert.Equal(t, 10, got, "MonadReduce should return initial value for nil slice")
}

// TestNilSlice_MonadReduceWithIndex checks that indexed reduction of a nil slice returns the seed unchanged.
func TestNilSlice_MonadReduceWithIndex(t *testing.T) {
	var xs []int
	got := MonadReduceWithIndex(xs, func(i int, acc int, v int) int { return acc + v }, 10)
	assert.Equal(t, 10, got, "MonadReduceWithIndex should return initial value for nil slice")
}

// TestNilSlice_Reduce checks that the curried Reduce on a nil slice returns the seed unchanged.
func TestNilSlice_Reduce(t *testing.T) {
	var xs []int
	got := Reduce(func(acc int, v int) int { return acc + v }, 10)(xs)
	assert.Equal(t, 10, got, "Reduce should return initial value for nil slice")
}

// TestNilSlice_ReduceWithIndex checks that the curried indexed Reduce on a nil slice returns the seed unchanged.
func TestNilSlice_ReduceWithIndex(t *testing.T) {
	var xs []int
	got := ReduceWithIndex(func(i int, acc int, v int) int { return acc + v }, 10)(xs)
	assert.Equal(t, 10, got, "ReduceWithIndex should return initial value for nil slice")
}

// TestNilSlice_ReduceRight checks that right-reduction of a nil slice returns the seed unchanged.
func TestNilSlice_ReduceRight(t *testing.T) {
	var xs []int
	got := ReduceRight(func(v int, acc int) int { return acc + v }, 10)(xs)
	assert.Equal(t, 10, got, "ReduceRight should return initial value for nil slice")
}

// TestNilSlice_ReduceRightWithIndex checks that indexed right-reduction of a nil slice returns the seed unchanged.
func TestNilSlice_ReduceRightWithIndex(t *testing.T) {
	var xs []int
	got := ReduceRightWithIndex(func(i int, v int, acc int) int { return acc + v }, 10)(xs)
	assert.Equal(t, 10, got, "ReduceRightWithIndex should return initial value for nil slice")
}

// TestNilSlice_ReduceRef checks that reduction by reference over a nil slice returns the seed unchanged.
func TestNilSlice_ReduceRef(t *testing.T) {
	var xs []int
	got := ReduceRef(func(acc int, v *int) int { return acc + *v }, 10)(xs)
	assert.Equal(t, 10, got, "ReduceRef should return initial value for nil slice")
}
// TestNilSlice_Append checks that appending to a nil slice produces a fresh singleton slice.
func TestNilSlice_Append(t *testing.T) {
	var xs []int
	got := Append(xs, 42)
	assert.NotNil(t, got, "Append should return non-nil slice")
	assert.Len(t, got, 1, "Append should create slice with one element")
	assert.Equal(t, 42, got[0], "Append should add element correctly")
}
// TestNilSlice_MonadChain checks that chaining over a nil slice yields an empty, non-nil result.
func TestNilSlice_MonadChain(t *testing.T) {
	var xs []int
	got := MonadChain(xs, func(v int) []string { return []string{fmt.Sprintf("%d", v)} })
	assert.NotNil(t, got, "MonadChain should return non-nil slice")
	assert.Len(t, got, 0, "MonadChain should return empty slice for nil input")
}

// TestNilSlice_Chain checks that the curried Chain on a nil slice yields an empty, non-nil result.
func TestNilSlice_Chain(t *testing.T) {
	var xs []int
	got := Chain(func(v int) []string { return []string{fmt.Sprintf("%d", v)} })(xs)
	assert.NotNil(t, got, "Chain should return non-nil slice")
	assert.Len(t, got, 0, "Chain should return empty slice for nil input")
}
// TestNilSlice_MonadAp exercises MonadAp across every nil/non-nil combination of its inputs.
func TestNilSlice_MonadAp(t *testing.T) {
	var fns []func(int) string
	var vals []int
	// nil functions, nil values
	got1 := MonadAp(fns, vals)
	assert.NotNil(t, got1, "MonadAp should return non-nil slice")
	assert.Len(t, got1, 0, "MonadAp should return empty slice for nil inputs")
	// nil functions, non-nil values
	got2 := MonadAp(fns, []int{1, 2, 3})
	assert.NotNil(t, got2, "MonadAp should return non-nil slice")
	assert.Len(t, got2, 0, "MonadAp should return empty slice when functions are nil")
	// non-nil functions, nil values
	got3 := MonadAp([]func(int) string{func(v int) string { return fmt.Sprintf("%d", v) }}, vals)
	assert.NotNil(t, got3, "MonadAp should return non-nil slice")
	assert.Len(t, got3, 0, "MonadAp should return empty slice when values are nil")
}

// TestNilSlice_Ap checks that the curried Ap with nil functions and values yields an empty, non-nil result.
func TestNilSlice_Ap(t *testing.T) {
	var vals []int
	var fns []func(int) string
	got := Ap[string](vals)(fns)
	assert.NotNil(t, got, "Ap should return non-nil slice")
	assert.Len(t, got, 0, "Ap should return empty slice for nil inputs")
}
// TestNilSlice_Head verifies that Head handles nil slices correctly
func TestNilSlice_Head(t *testing.T) {
var nilSlice []int
result := Head(nilSlice)
assert.True(t, O.IsNone(result), "Head should return None for nil slice")
}
// TestNilSlice_First verifies that First handles nil slices correctly
func TestNilSlice_First(t *testing.T) {
var nilSlice []int
result := First(nilSlice)
assert.True(t, O.IsNone(result), "First should return None for nil slice")
}
// TestNilSlice_Last verifies that Last handles nil slices correctly
func TestNilSlice_Last(t *testing.T) {
var nilSlice []int
result := Last(nilSlice)
assert.True(t, O.IsNone(result), "Last should return None for nil slice")
}
// TestNilSlice_Tail verifies that Tail handles nil slices correctly
func TestNilSlice_Tail(t *testing.T) {
var nilSlice []int
result := Tail(nilSlice)
assert.True(t, O.IsNone(result), "Tail should return None for nil slice")
}
// TestNilSlice_Flatten checks that flattening a nil slice of slices yields an empty, non-nil result.
func TestNilSlice_Flatten(t *testing.T) {
	var xss [][]int
	got := Flatten(xss)
	assert.NotNil(t, got, "Flatten should return non-nil slice")
	assert.Len(t, got, 0, "Flatten should return empty slice for nil input")
}

// TestNilSlice_Lookup checks that looking up any index in a nil slice is None.
func TestNilSlice_Lookup(t *testing.T) {
	var xs []int
	assert.True(t, O.IsNone(Lookup[int](0)(xs)), "Lookup should return None for nil slice")
}

// TestNilSlice_Size checks that the size of a nil slice is zero.
func TestNilSlice_Size(t *testing.T) {
	var xs []int
	assert.Equal(t, 0, Size(xs), "Size should return 0 for nil slice")
}
// TestNilSlice_MonadPartition checks that partitioning a nil slice yields two empty, non-nil halves.
func TestNilSlice_MonadPartition(t *testing.T) {
	var xs []int
	parts := MonadPartition(xs, func(v int) bool { return v > 0 })
	left, right := P.Head(parts), P.Tail(parts)
	assert.NotNil(t, left, "MonadPartition left should return non-nil slice")
	assert.NotNil(t, right, "MonadPartition right should return non-nil slice")
	assert.Len(t, left, 0, "MonadPartition left should be empty for nil input")
	assert.Len(t, right, 0, "MonadPartition right should be empty for nil input")
}

// TestNilSlice_Partition checks that the curried Partition on a nil slice yields two empty, non-nil halves.
func TestNilSlice_Partition(t *testing.T) {
	var xs []int
	parts := Partition(func(v int) bool { return v > 0 })(xs)
	left, right := P.Head(parts), P.Tail(parts)
	assert.NotNil(t, left, "Partition left should return non-nil slice")
	assert.NotNil(t, right, "Partition right should return non-nil slice")
	assert.Len(t, left, 0, "Partition left should be empty for nil input")
	assert.Len(t, right, 0, "Partition right should be empty for nil input")
}
// TestNilSlice_IsNil checks that IsNil distinguishes a nil slice from an allocated empty one.
func TestNilSlice_IsNil(t *testing.T) {
	var xs []int
	assert.True(t, IsNil(xs), "IsNil should return true for nil slice")
	assert.False(t, IsNil([]int{}), "IsNil should return false for non-nil empty slice")
}

// TestNilSlice_IsNonNil checks that IsNonNil distinguishes a nil slice from an allocated empty one.
func TestNilSlice_IsNonNil(t *testing.T) {
	var xs []int
	assert.False(t, IsNonNil(xs), "IsNonNil should return false for nil slice")
	assert.True(t, IsNonNil([]int{}), "IsNonNil should return true for non-nil empty slice")
}

// TestNilSlice_Copy checks that copying a nil slice yields an empty, non-nil result.
func TestNilSlice_Copy(t *testing.T) {
	var xs []int
	got := Copy(xs)
	assert.NotNil(t, got, "Copy should return non-nil slice")
	assert.Len(t, got, 0, "Copy should return empty slice for nil input")
}
// TestNilSlice_FoldMap verifies that FoldMap handles nil slices correctly
func TestNilSlice_FoldMap(t *testing.T) {
var nilSlice []int
monoid := S.Monoid
foldMap := FoldMap[int](monoid)(func(v int) string {
return fmt.Sprintf("%d", v)
})
result := foldMap(nilSlice)
assert.Equal(t, "", result, "FoldMap should return empty value for nil slice")
}
// TestNilSlice_FoldMapWithIndex verifies that FoldMapWithIndex handles nil slices correctly
func TestNilSlice_FoldMapWithIndex(t *testing.T) {
var nilSlice []int
monoid := S.Monoid
foldMap := FoldMapWithIndex[int](monoid)(func(i int, v int) string {
return fmt.Sprintf("%d:%d", i, v)
})
result := foldMap(nilSlice)
assert.Equal(t, "", result, "FoldMapWithIndex should return empty value for nil slice")
}
// TestNilSlice_Fold verifies that Fold handles nil slices correctly
func TestNilSlice_Fold(t *testing.T) {
var nilSlice []string
monoid := S.Monoid
fold := Fold[string](monoid)
result := fold(nilSlice)
assert.Equal(t, "", result, "Fold should return empty value for nil slice")
}
// TestNilSlice_Concat exercises Concat across nil/non-nil operand combinations.
func TestNilSlice_Concat(t *testing.T) {
	var none []int
	some := []int{1, 2, 3}
	// nil concat non-nil
	got1 := Concat(some)(none)
	assert.Equal(t, some, got1, "nil concat non-nil should return non-nil slice")
	// non-nil concat nil
	got2 := Concat(none)(some)
	assert.Equal(t, some, got2, "non-nil concat nil should return non-nil slice")
	// nil concat nil
	got3 := Concat(none)(none)
	assert.Nil(t, got3, "nil concat nil should return nil")
}
// TestNilSlice_MonadFlap checks that flapping a nil slice of functions yields an empty, non-nil result.
func TestNilSlice_MonadFlap(t *testing.T) {
	var fns []func(int) string
	got := MonadFlap(fns, 42)
	assert.NotNil(t, got, "MonadFlap should return non-nil slice")
	assert.Len(t, got, 0, "MonadFlap should return empty slice for nil input")
}

// TestNilSlice_Flap checks that the curried Flap on a nil slice of functions yields an empty, non-nil result.
func TestNilSlice_Flap(t *testing.T) {
	var fns []func(int) string
	got := Flap[string, int](42)(fns)
	assert.NotNil(t, got, "Flap should return non-nil slice")
	assert.Len(t, got, 0, "Flap should return empty slice for nil input")
}
// TestNilSlice_Reverse verifies that Reverse handles nil slices correctly
func TestNilSlice_Reverse(t *testing.T) {
var nilSlice []int
result := Reverse(nilSlice)
assert.Nil(t, result, "Reverse should return nil for nil slice")
}
// TestNilSlice_Extend verifies that Extend handles nil slices correctly
func TestNilSlice_Extend(t *testing.T) {
var nilSlice []int
extend := Extend(func(as []int) string {
return fmt.Sprintf("%v", as)
})
result := extend(nilSlice)
assert.NotNil(t, result, "Extend should return non-nil slice")
assert.Equal(t, 0, len(result), "Extend should return empty slice for nil input")
}
// TestNilSlice_Empty checks that Empty yields an allocated, zero-length slice.
func TestNilSlice_Empty(t *testing.T) {
	got := Empty[int]()
	assert.NotNil(t, got, "Empty should return non-nil slice")
	assert.Len(t, got, 0, "Empty should return empty slice")
	assert.False(t, IsNil(got), "Empty should not return nil slice")
}

// TestNilSlice_Zero checks that Zero yields an allocated, zero-length slice.
func TestNilSlice_Zero(t *testing.T) {
	got := Zero[int]()
	assert.NotNil(t, got, "Zero should return non-nil slice")
	assert.Len(t, got, 0, "Zero should return empty slice")
	assert.False(t, IsNil(got), "Zero should not return nil slice")
}

// TestNilSlice_ConstNil checks that ConstNil yields a genuinely nil slice.
func TestNilSlice_ConstNil(t *testing.T) {
	got := ConstNil[int]()
	assert.Nil(t, got, "ConstNil should return nil slice")
	assert.True(t, IsNil(got), "ConstNil should return nil slice")
}

// TestNilSlice_Of checks that Of yields a singleton slice holding the given value.
func TestNilSlice_Of(t *testing.T) {
	got := Of(42)
	assert.NotNil(t, got, "Of should return non-nil slice")
	assert.Len(t, got, 1, "Of should create slice with one element")
	assert.Equal(t, 42, got[0], "Of should set value correctly")
}

130
v2/context/reader/reader.go Normal file
View File

@@ -0,0 +1,130 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package reader provides a specialization of the Reader monad for [context.Context].
//
// This package offers a context-aware Reader monad that simplifies working with
// Go's [context.Context] in a functional programming style. It eliminates the need
// to explicitly thread context through function calls while maintaining type safety
// and composability.
//
// # Core Concept
//
// The Reader monad represents computations that depend on a shared environment.
// In this package, that environment is fixed to [context.Context], making it
// particularly useful for:
//
// - Request-scoped data propagation
// - Cancellation and timeout handling
// - Dependency injection via context values
// - Avoiding explicit context parameter threading
//
// # Type Definitions
//
// - Reader[A]: A computation that depends on context.Context and produces A
// - Kleisli[A, B]: A function from A to Reader[B] for composing computations
// - Operator[A, B]: A transformation from Reader[A] to Reader[B]
//
// # Usage Pattern
//
// Instead of passing context explicitly through every function:
//
// func processUser(ctx context.Context, userID string) (User, error) {
// user := fetchUser(ctx, userID)
// profile := fetchProfile(ctx, user.ProfileID)
// return enrichUser(ctx, user, profile), nil
// }
//
// You can use Reader to compose context-dependent operations:
//
// fetchUser := func(userID string) Reader[User] {
// return func(ctx context.Context) User {
// // Use ctx for database access, cancellation, etc.
// return queryDatabase(ctx, userID)
// }
// }
//
//	processUser := func(userID string) Reader[User] {
//		return F.Pipe1(
//			fetchUser(userID),
//			reader.Chain(func(user User) Reader[User] {
//				// nest the Map inside Chain so "user" stays in scope
//				return F.Pipe1(
//					fetchProfile(user.ProfileID),
//					reader.Map(func(profile Profile) User {
//						return enrichUser(user, profile)
//					}),
//				)
//			}),
//		)
//	}
//
// // Execute with context
// ctx := context.Background()
// user := processUser("user123")(ctx)
//
// # Integration with Standard Library
//
// This package works seamlessly with Go's standard [context] package:
//
// - Context cancellation and deadlines are preserved
// - Context values can be accessed within Reader computations
// - Readers can be composed with context-aware libraries
//
// # Relationship to Other Packages
//
// This package is a specialization of [github.com/IBM/fp-go/v2/reader] where
// the environment type R is fixed to [context.Context]. For more general
// Reader operations, see the base reader package.
//
// For combining Reader with other monads:
// - [github.com/IBM/fp-go/v2/context/readerio]: Reader + IO effects
// - [github.com/IBM/fp-go/v2/readeroption]: Reader + Option
// - [github.com/IBM/fp-go/v2/readerresult]: Reader + Result (Either)
//
// # Example: HTTP Request Handler
//
// type RequestContext struct {
// UserID string
// RequestID string
// }
//
// // Extract request context from context.Context
// getRequestContext := func(ctx context.Context) RequestContext {
// return RequestContext{
// UserID: ctx.Value("userID").(string),
// RequestID: ctx.Value("requestID").(string),
// }
// }
//
// // A Reader that logs with request context
// logInfo := func(message string) Reader[function.Void] {
// return func(ctx context.Context) function.Void {
// reqCtx := getRequestContext(ctx)
// log.Printf("[%s] User %s: %s", reqCtx.RequestID, reqCtx.UserID, message)
// return function.VOID
// }
// }
//
// // Compose operations
// handleRequest := func(data string) Reader[Response] {
// return F.Pipe2(
// logInfo("Processing request"),
// reader.Chain(func(_ function.Void) Reader[Result] {
// return processData(data)
// }),
// reader.Map(func(result Result) Response {
// return Response{Data: result}
// }),
// )
// }
package reader

142
v2/context/reader/types.go Normal file
View File

@@ -0,0 +1,142 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package reader
import (
"context"
R "github.com/IBM/fp-go/v2/reader"
)
type (
	// Reader represents a computation that depends on a [context.Context] and produces a value of type A.
	//
	// This is a specialization of the generic Reader monad where the environment type is fixed
	// to [context.Context]. This is particularly useful for Go applications that need to thread
	// context through computations for cancellation, deadlines, and request-scoped values.
	//
	// Type Parameters:
	//   - A: The result type produced by the computation
	//
	// Reader[A] is equivalent to func(context.Context) A
	//
	// The Reader monad enables:
	//   - Dependency injection using context values
	//   - Cancellation and timeout handling
	//   - Request-scoped data propagation
	//   - Avoiding explicit context parameter threading
	//
	// Example:
	//
	//	// A Reader that extracts a user ID from context
	//	getUserID := func(ctx context.Context) string {
	//		if userID, ok := ctx.Value("userID").(string); ok {
	//			return userID
	//		}
	//		return "anonymous"
	//	}
	//
	//	// A Reader that checks if context is cancelled
	//	isCancelled := func(ctx context.Context) bool {
	//		select {
	//		case <-ctx.Done():
	//			return true
	//		default:
	//			return false
	//		}
	//	}
	//
	//	// Use the readers with a context
	//	ctx := context.WithValue(context.Background(), "userID", "user123")
	//	userID := getUserID(ctx)      // "user123"
	//	cancelled := isCancelled(ctx) // false
	//
	// Note: declared as a type alias (=), so Reader[A] is fully interchangeable
	// with the underlying R.Reader[context.Context, A] — no conversion required.
	Reader[A any] = R.Reader[context.Context, A]

	// Kleisli represents a Kleisli arrow for the context-based Reader monad.
	//
	// It's a function from A to Reader[B], used for composing Reader computations
	// that all depend on the same [context.Context].
	//
	// Type Parameters:
	//   - A: The input type
	//   - B: The output type wrapped in Reader
	//
	// Kleisli[A, B] is equivalent to func(A) func(context.Context) B
	//
	// Kleisli arrows are fundamental for monadic composition, allowing you to chain
	// operations that depend on context without explicitly passing the context through
	// each function call.
	//
	// Example:
	//
	//	// A Kleisli arrow that creates a greeting Reader from a name
	//	greet := func(name string) Reader[string] {
	//		return func(ctx context.Context) string {
	//			if deadline, ok := ctx.Deadline(); ok {
	//				return fmt.Sprintf("Hello %s (deadline: %v)", name, deadline)
	//			}
	//			return fmt.Sprintf("Hello %s", name)
	//		}
	//	}
	//
	//	// Use the Kleisli arrow
	//	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	//	defer cancel()
	//	greeting := greet("Alice")(ctx) // "Hello Alice (deadline: ...)"
	//
	// Note: expressed as R.Reader[A, Reader[B]], i.e. the environment of the outer
	// reader is the plain input value A, not a context.
	Kleisli[A, B any] = R.Reader[A, Reader[B]]

	// Operator represents a transformation from one Reader to another.
	//
	// It takes a Reader[A] and produces a Reader[B], where both readers depend on
	// the same [context.Context]. This type is commonly used for operations like
	// Map, Chain, and other transformations that convert readers while preserving
	// the context dependency.
	//
	// Type Parameters:
	//   - A: The input Reader's result type
	//   - B: The output Reader's result type
	//
	// Operator[A, B] is equivalent to func(Reader[A]) func(context.Context) B
	//
	// Operators enable building pipelines of context-dependent computations where
	// each step can transform the result of the previous computation while maintaining
	// access to the shared context.
	//
	// Example:
	//
	//	// An operator that transforms int readers to string readers
	//	intToString := func(r Reader[int]) Reader[string] {
	//		return func(ctx context.Context) string {
	//			value := r(ctx)
	//			return strconv.Itoa(value)
	//		}
	//	}
	//
	//	// A Reader that extracts a timeout value from context
	//	getTimeout := func(ctx context.Context) int {
	//		if deadline, ok := ctx.Deadline(); ok {
	//			return int(time.Until(deadline).Seconds())
	//		}
	//		return 0
	//	}
	//
	//	// Transform the Reader
	//	getTimeoutStr := intToString(getTimeout)
	//	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	//	defer cancel()
	//	result := getTimeoutStr(ctx) // "30" (approximately)
	//
	// Note: defined via Kleisli[Reader[A], B], which expands to
	// func(Reader[A]) Reader[B].
	Operator[A, B any] = Kleisli[Reader[A], B]
)

View File

@@ -452,5 +452,3 @@ func BenchmarkWithResource(b *testing.B) {
operation(ctx)()
}
}
// Made with Bob

View File

@@ -21,6 +21,7 @@ import (
"github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/io"
"github.com/IBM/fp-go/v2/pair"
RIO "github.com/IBM/fp-go/v2/readerio"
)
// Promap is the profunctor map operation that transforms both the input and output of a context-based ReaderIO.
@@ -35,21 +36,24 @@ import (
// The function f returns both a new context and a CancelFunc that should be called to release resources.
//
// Type Parameters:
// - R: The input environment type that f transforms into context.Context
// - A: The original result type produced by the ReaderIO
// - B: The new output result type
//
// Parameters:
// - f: Function to transform the input context (contravariant)
// - f: Function to transform the input environment R into context.Context (contravariant)
// - g: Function to transform the output value from A to B (covariant)
//
// Returns:
// - An Operator that takes a ReaderIO[A] and returns a ReaderIO[B]
// - A Kleisli arrow that takes a ReaderIO[A] and returns a function from R to B
//
// Note: When R is context.Context, this simplifies to an Operator[A, B]
//
//go:inline
func Promap[A, B any](f pair.Kleisli[context.CancelFunc, context.Context, context.Context], g func(A) B) Operator[A, B] {
func Promap[R, A, B any](f pair.Kleisli[context.CancelFunc, R, context.Context], g func(A) B) RIO.Kleisli[R, ReaderIO[A], B] {
return function.Flow2(
Local[A](f),
Map(g),
RIO.Map[R](g),
)
}
@@ -63,15 +67,18 @@ func Promap[A, B any](f pair.Kleisli[context.CancelFunc, context.Context, contex
//
// Type Parameters:
// - A: The result type (unchanged)
// - R: The input environment type that f transforms into context.Context
//
// Parameters:
// - f: Function to transform the context, returning a new context and CancelFunc
// - f: Function to transform the input environment R into context.Context, returning a new context and CancelFunc
//
// Returns:
// - An Operator that takes a ReaderIO[A] and returns a ReaderIO[A]
// - A Kleisli arrow that takes a ReaderIO[A] and returns a function from R to A
//
// Note: When R is context.Context, this simplifies to an Operator[A, A]
//
//go:inline
func Contramap[A any](f pair.Kleisli[context.CancelFunc, context.Context, context.Context]) Operator[A, A] {
func Contramap[A, R any](f pair.Kleisli[context.CancelFunc, R, context.Context]) RIO.Kleisli[R, ReaderIO[A], A] {
return Local[A](f)
}

View File

@@ -634,12 +634,15 @@ func ReadIO[A any](r IO[context.Context]) func(ReaderIO[A]) IO[A] {
//
// Type Parameters:
// - A: The value type of the ReaderIO
// - R: The input environment type that f transforms into context.Context
//
// Parameters:
// - f: A function that transforms the context and returns a cancel function
// - f: A function that transforms the input environment R into context.Context and returns a cancel function
//
// Returns:
// - An Operator that runs the computation with the transformed context
// - A Kleisli arrow that runs the computation with the transformed context
//
// Note: When R is context.Context, this simplifies to an Operator[A, A]
//
// Example:
//
@@ -649,9 +652,9 @@ func ReadIO[A any](r IO[context.Context]) func(ReaderIO[A]) IO[A] {
// type key int
// const userKey key = 0
//
// addUser := readerio.Local[string](func(ctx context.Context) (context.Context, context.CancelFunc) {
// addUser := readerio.Local[string, context.Context](func(ctx context.Context) pair.Pair[context.CancelFunc, context.Context] {
// newCtx := context.WithValue(ctx, userKey, "Alice")
// return newCtx, func() {} // No-op cancel
// return pair.MakePair(func() {}, newCtx) // No-op cancel
// })
//
// getUser := readerio.FromReader(func(ctx context.Context) string {
@@ -670,19 +673,20 @@ func ReadIO[A any](r IO[context.Context]) func(ReaderIO[A]) IO[A] {
// Timeout Example:
//
// // Add a 5-second timeout to a specific operation
// withTimeout := readerio.Local[Data](func(ctx context.Context) (context.Context, context.CancelFunc) {
// return context.WithTimeout(ctx, 5*time.Second)
// withTimeout := readerio.Local[Data, context.Context](func(ctx context.Context) pair.Pair[context.CancelFunc, context.Context] {
// newCtx, cancel := context.WithTimeout(ctx, 5*time.Second)
// return pair.MakePair(cancel, newCtx)
// })
//
// result := F.Pipe1(
// fetchData,
// withTimeout,
// )
func Local[A any](f pair.Kleisli[context.CancelFunc, context.Context, context.Context]) Operator[A, A] {
return func(rr ReaderIO[A]) ReaderIO[A] {
return func(ctx context.Context) IO[A] {
func Local[A, R any](f pair.Kleisli[context.CancelFunc, R, context.Context]) RIO.Kleisli[R, ReaderIO[A], A] {
return func(rr ReaderIO[A]) RIO.ReaderIO[R, A] {
return func(r R) IO[A] {
return func() A {
otherCancel, otherCtx := pair.Unpack(f(ctx))
otherCancel, otherCtx := pair.Unpack(f(r))
defer otherCancel()
return rr(otherCtx)()
}

View File

@@ -413,5 +413,3 @@ func isRight[A any](res Result[A]) bool {
func isLeft[A any](res Result[A]) bool {
return result.IsLeft(res)
}
// Made with Bob

View File

@@ -22,6 +22,7 @@ import (
"github.com/IBM/fp-go/v2/io"
"github.com/IBM/fp-go/v2/ioresult"
"github.com/IBM/fp-go/v2/pair"
RIOR "github.com/IBM/fp-go/v2/readerioresult"
"github.com/IBM/fp-go/v2/result"
)
@@ -38,21 +39,24 @@ import (
// The error type is fixed as error and remains unchanged through the transformation.
//
// Type Parameters:
// - R: The input environment type that f transforms into context.Context
// - A: The original success type produced by the ReaderIOResult
// - B: The new output success type
//
// Parameters:
// - f: Function to transform the input context (contravariant)
// - f: Function to transform the input environment R into context.Context (contravariant)
// - g: Function to transform the output success value from A to B (covariant)
//
// Returns:
// - An Operator that takes a ReaderIOResult[A] and returns a ReaderIOResult[B]
// - A Kleisli arrow that takes a ReaderIOResult[A] and returns a function from R to B
//
// Note: When R is context.Context, this simplifies to an Operator[A, B]
//
//go:inline
func Promap[A, B any](f pair.Kleisli[context.CancelFunc, context.Context, context.Context], g func(A) B) Operator[A, B] {
func Promap[R, A, B any](f pair.Kleisli[context.CancelFunc, R, context.Context], g func(A) B) RIOR.Kleisli[R, ReaderIOResult[A], B] {
return function.Flow2(
Local[A](f),
Map(g),
RIOR.Map[R](g),
)
}
@@ -66,15 +70,18 @@ func Promap[A, B any](f pair.Kleisli[context.CancelFunc, context.Context, contex
//
// Type Parameters:
// - A: The success type (unchanged)
// - R: The input environment type that f transforms into context.Context
//
// Parameters:
// - f: Function to transform the context, returning a new context and CancelFunc
// - f: Function to transform the input environment R into context.Context, returning a new context and CancelFunc
//
// Returns:
// - An Operator that takes a ReaderIOResult[A] and returns a ReaderIOResult[A]
// - A Kleisli arrow that takes a ReaderIOResult[A] and returns a function from R to A
//
// Note: When R is context.Context, this simplifies to an Operator[A, A]
//
//go:inline
func Contramap[A any](f pair.Kleisli[context.CancelFunc, context.Context, context.Context]) Operator[A, A] {
func Contramap[A, R any](f pair.Kleisli[context.CancelFunc, R, context.Context]) RIOR.Kleisli[R, ReaderIOResult[A], A] {
return Local[A](f)
}

View File

@@ -32,7 +32,6 @@ import (
"github.com/IBM/fp-go/v2/reader"
RIOR "github.com/IBM/fp-go/v2/readerioresult"
"github.com/IBM/fp-go/v2/readeroption"
"github.com/IBM/fp-go/v2/result"
)
const (
@@ -1011,12 +1010,15 @@ func TapLeftIOK[A, B any](f io.Kleisli[error, B]) Operator[A, A] {
//
// Type Parameters:
// - A: The value type of the ReaderIOResult
// - R: The input environment type that f transforms into context.Context
//
// Parameters:
// - f: A function that transforms the context and returns a cancel function
// - f: A function that transforms the input environment R into context.Context and returns a cancel function
//
// Returns:
// - An Operator that runs the computation with the transformed context
// - A Kleisli arrow that runs the computation with the transformed context
//
// Note: When R is context.Context, this simplifies to an Operator[A, A]
//
// Example:
//
@@ -1026,9 +1028,9 @@ func TapLeftIOK[A, B any](f io.Kleisli[error, B]) Operator[A, A] {
// type key int
// const userKey key = 0
//
// addUser := readerioresult.Local[string](func(ctx context.Context) (context.Context, context.CancelFunc) {
// addUser := readerioresult.Local[string, context.Context](func(ctx context.Context) pair.Pair[context.CancelFunc, context.Context] {
// newCtx := context.WithValue(ctx, userKey, "Alice")
// return newCtx, func() {} // No-op cancel
// return pair.MakePair(func() {}, newCtx) // No-op cancel
// })
//
// getUser := readerioresult.FromReader(func(ctx context.Context) string {
@@ -1047,27 +1049,19 @@ func TapLeftIOK[A, B any](f io.Kleisli[error, B]) Operator[A, A] {
// Timeout Example:
//
// // Add a 5-second timeout to a specific operation
// withTimeout := readerioresult.Local[Data](func(ctx context.Context) (context.Context, context.CancelFunc) {
// return context.WithTimeout(ctx, 5*time.Second)
// withTimeout := readerioresult.Local[Data, context.Context](func(ctx context.Context) pair.Pair[context.CancelFunc, context.Context] {
// newCtx, cancel := context.WithTimeout(ctx, 5*time.Second)
// return pair.MakePair(cancel, newCtx)
// })
//
// result := F.Pipe1(
// fetchData,
// withTimeout,
// )
func Local[A any](f pair.Kleisli[context.CancelFunc, context.Context, context.Context]) Operator[A, A] {
return func(rr ReaderIOResult[A]) ReaderIOResult[A] {
return func(ctx context.Context) IOResult[A] {
return func() Result[A] {
if ctx.Err() != nil {
return result.Left[A](context.Cause(ctx))
}
otherCancel, otherCtx := pair.Unpack(f(ctx))
defer otherCancel()
return rr(otherCtx)()
}
}
}
//
//go:inline
func Local[A, R any](f pair.Kleisli[context.CancelFunc, R, context.Context]) RIOR.Kleisli[R, ReaderIOResult[A], A] {
return readerio.Local[Result[A]](f)
}
// WithTimeout adds a timeout to the context for a ReaderIOResult computation.

View File

@@ -3,6 +3,7 @@ package readerreaderioresult
import (
"context"
"github.com/IBM/fp-go/v2/context/reader"
"github.com/IBM/fp-go/v2/context/readerioresult"
"github.com/IBM/fp-go/v2/io"
"github.com/IBM/fp-go/v2/ioresult"
@@ -13,6 +14,17 @@ import (
// Local modifies the outer environment before passing it to a computation.
// Useful for providing different configurations to sub-computations.
//
// Type Parameters:
// - A: The success type produced by the ReaderReaderIOResult
// - R1: The original outer environment type expected by the ReaderReaderIOResult
// - R2: The new input outer environment type
//
// Parameters:
// - f: A function that transforms R2 to R1
//
// Returns:
// - A function that takes a ReaderReaderIOResult[R1, A] and returns a ReaderReaderIOResult[R2, A]
//
//go:inline
func Local[A, R1, R2 any](f func(R2) R1) func(ReaderReaderIOResult[R1, A]) ReaderReaderIOResult[R2, A] {
return RRIOE.Local[context.Context, error, A](f)
@@ -185,6 +197,65 @@ func LocalReaderIOResultK[A, R1, R2 any](f readerioresult.Kleisli[R2, R1]) func(
return RRIOE.LocalReaderIOEitherK[A](f)
}
// LocalReaderK transforms the outer environment of a ReaderReaderIOResult using a Reader-based Kleisli arrow.
// It allows you to modify the outer environment through a pure computation that depends on the inner context
// before passing it to the ReaderReaderIOResult.
//
// This is useful when the outer environment transformation is a pure computation that requires access
// to the inner context (e.g., context.Context) but cannot fail. Common use cases include:
//   - Extracting configuration from context values
//   - Computing derived environment values based on context
//   - Transforming environment based on context metadata
//
// The transformation happens in two stages:
//  1. The Reader function f is executed with the R2 outer environment and inner context to produce an R1 value
//  2. The resulting R1 value is passed as the outer environment to the ReaderReaderIOResult[R1, A]
//
// Type Parameters:
//   - A: The success type produced by the ReaderReaderIOResult
//   - R1: The original outer environment type expected by the ReaderReaderIOResult
//   - R2: The new input outer environment type
//
// Parameters:
//   - f: A Reader Kleisli arrow that transforms R2 to R1 using the inner context
//
// Returns:
//   - A function that takes a ReaderReaderIOResult[R1, A] and returns a ReaderReaderIOResult[R2, A]
//
// Example Usage:
//
//	type ctxKey string
//	const configKey ctxKey = "config"
//
//	// Extract config from context and transform environment
//	extractConfig := func(path string) reader.Reader[DetailedConfig] {
//		return func(ctx context.Context) DetailedConfig {
//			if cfg, ok := ctx.Value(configKey).(DetailedConfig); ok {
//				return cfg
//			}
//			return DetailedConfig{Host: "localhost", Port: 8080}
//		}
//	}
//
//	// Use the config
//	useConfig := func(cfg DetailedConfig) readerioresult.ReaderIOResult[string] {
//		return func(ctx context.Context) ioresult.IOResult[string] {
//			return func() result.Result[string] {
//				return result.Of(fmt.Sprintf("%s:%d", cfg.Host, cfg.Port))
//			}
//		}
//	}
//
//	// Compose using LocalReaderK
//	adapted := LocalReaderK[string](extractConfig)(useConfig)
//	ctx := context.WithValue(context.Background(), configKey, DetailedConfig{Host: "api.example.com", Port: 443})
//	result := adapted("config.json")(ctx)() // Result: "api.example.com:443"
//
// Implementation note: delegates to the generic RRIOE.LocalReaderK with the
// error type fixed to the builtin error, matching this package's Result flavor.
//
//go:inline
func LocalReaderK[A, R1, R2 any](f reader.Kleisli[R2, R1]) func(ReaderReaderIOResult[R1, A]) ReaderReaderIOResult[R2, A] {
	return RRIOE.LocalReaderK[error, A](f)
}
// LocalReaderReaderIOEitherK transforms the outer environment of a ReaderReaderIOResult using a ReaderReaderIOResult-based Kleisli arrow.
// It allows you to modify the outer environment through a computation that depends on both the outer environment
// and the inner context, and can perform IO effects that may fail.

View File

@@ -21,6 +21,7 @@ import (
"fmt"
"testing"
"github.com/IBM/fp-go/v2/context/reader"
"github.com/IBM/fp-go/v2/context/readerioresult"
"github.com/IBM/fp-go/v2/io"
"github.com/IBM/fp-go/v2/ioresult"
@@ -426,3 +427,226 @@ func TestLocalReaderIOResultK(t *testing.T) {
assert.True(t, result.IsLeft(resErr))
})
}
// TestLocalReaderK tests LocalReaderK functionality: adapting the outer
// environment of a ReaderReaderIOResult via a pure, context-aware Reader
// transformation. SimpleConfig and DetailedConfig are presumably declared
// elsewhere in this test package — TODO confirm.
func TestLocalReaderK(t *testing.T) {
	ctx := context.Background()

	t.Run("basic Reader transformation", func(t *testing.T) {
		// Reader that transforms string path to SimpleConfig using context
		loadConfig := func(path string) reader.Reader[SimpleConfig] {
			return func(ctx context.Context) SimpleConfig {
				// Could extract values from context here
				return SimpleConfig{Port: 8080}
			}
		}

		// ReaderReaderIOResult that uses the config
		useConfig := func(cfg SimpleConfig) readerioresult.ReaderIOResult[string] {
			return func(ctx context.Context) ioresult.IOResult[string] {
				return func() result.Result[string] {
					return result.Of(fmt.Sprintf("Port: %d", cfg.Port))
				}
			}
		}

		// Compose using LocalReaderK; the three applications are:
		// outer environment ("config.json"), inner context, then the IO thunk.
		adapted := LocalReaderK[string](loadConfig)(useConfig)
		res := adapted("config.json")(ctx)()
		assert.Equal(t, result.Of("Port: 8080"), res)
	})

	t.Run("extract config from context", func(t *testing.T) {
		type ctxKey string
		const configKey ctxKey = "config"

		// Reader that extracts config from context
		extractConfig := func(path string) reader.Reader[DetailedConfig] {
			return func(ctx context.Context) DetailedConfig {
				if cfg, ok := ctx.Value(configKey).(DetailedConfig); ok {
					return cfg
				}
				// Default config if not in context
				return DetailedConfig{Host: "localhost", Port: 8080}
			}
		}

		// Use the config
		useConfig := func(cfg DetailedConfig) readerioresult.ReaderIOResult[string] {
			return func(ctx context.Context) ioresult.IOResult[string] {
				return func() result.Result[string] {
					return result.Of(fmt.Sprintf("%s:%d", cfg.Host, cfg.Port))
				}
			}
		}

		adapted := LocalReaderK[string](extractConfig)(useConfig)

		// With context value
		ctxWithConfig := context.WithValue(ctx, configKey, DetailedConfig{Host: "api.example.com", Port: 443})
		res := adapted("ignored")(ctxWithConfig)()
		assert.Equal(t, result.Of("api.example.com:443"), res)

		// Without context value (uses default)
		resDefault := adapted("ignored")(ctx)()
		assert.Equal(t, result.Of("localhost:8080"), resDefault)
	})

	t.Run("context-aware transformation", func(t *testing.T) {
		type ctxKey string
		const multiplierKey ctxKey = "multiplier"

		// Reader that uses context to compute environment
		computeValue := func(base int) reader.Reader[int] {
			return func(ctx context.Context) int {
				if mult, ok := ctx.Value(multiplierKey).(int); ok {
					return base * mult
				}
				return base
			}
		}

		// Use the computed value
		formatValue := func(val int) readerioresult.ReaderIOResult[string] {
			return func(ctx context.Context) ioresult.IOResult[string] {
				return func() result.Result[string] {
					return result.Of(fmt.Sprintf("Value: %d", val))
				}
			}
		}

		adapted := LocalReaderK[string](computeValue)(formatValue)

		// With multiplier in context
		ctxWithMult := context.WithValue(ctx, multiplierKey, 10)
		res := adapted(5)(ctxWithMult)()
		assert.Equal(t, result.Of("Value: 50"), res)

		// Without multiplier (uses base value)
		resBase := adapted(5)(ctx)()
		assert.Equal(t, result.Of("Value: 5"), resBase)
	})

	t.Run("compose multiple LocalReaderK", func(t *testing.T) {
		type ctxKey string
		const prefixKey ctxKey = "prefix"

		// First transformation: int -> string using context
		intToString := func(n int) reader.Reader[string] {
			return func(ctx context.Context) string {
				if prefix, ok := ctx.Value(prefixKey).(string); ok {
					return fmt.Sprintf("%s-%d", prefix, n)
				}
				return fmt.Sprintf("%d", n)
			}
		}

		// Second transformation: string -> SimpleConfig
		stringToConfig := func(s string) reader.Reader[SimpleConfig] {
			return func(ctx context.Context) SimpleConfig {
				return SimpleConfig{Port: len(s) * 100}
			}
		}

		// Use the config
		formatConfig := func(cfg SimpleConfig) readerioresult.ReaderIOResult[string] {
			return func(ctx context.Context) ioresult.IOResult[string] {
				return func() result.Result[string] {
					return result.Of(fmt.Sprintf("Port: %d", cfg.Port))
				}
			}
		}

		// Compose transformations: step2 feeds intToString's output into step1.
		step1 := LocalReaderK[string](stringToConfig)(formatConfig)
		step2 := LocalReaderK[string](intToString)(step1)

		// With prefix in context
		ctxWithPrefix := context.WithValue(ctx, prefixKey, "test")
		res := step2(42)(ctxWithPrefix)()
		// "test-42" has length 7, so port = 700
		assert.Equal(t, result.Of("Port: 700"), res)

		// Without prefix
		resNoPrefix := step2(42)(ctx)()
		// "42" has length 2, so port = 200
		assert.Equal(t, result.Of("Port: 200"), resNoPrefix)
	})

	t.Run("error propagation in ReaderReaderIOResult", func(t *testing.T) {
		// Reader transformation (pure, cannot fail)
		loadConfig := func(path string) reader.Reader[SimpleConfig] {
			return func(ctx context.Context) SimpleConfig {
				return SimpleConfig{Port: 8080}
			}
		}

		// ReaderReaderIOResult that returns an error
		failingOperation := func(cfg SimpleConfig) readerioresult.ReaderIOResult[string] {
			return func(ctx context.Context) ioresult.IOResult[string] {
				return func() result.Result[string] {
					return result.Left[string](errors.New("operation failed"))
				}
			}
		}

		adapted := LocalReaderK[string](loadConfig)(failingOperation)
		res := adapted("config.json")(ctx)()

		// Error from the ReaderReaderIOResult should propagate
		assert.True(t, result.IsLeft(res))
	})

	t.Run("real-world: environment selection based on context", func(t *testing.T) {
		type Environment string
		const (
			Dev  Environment = "dev"
			Prod Environment = "prod"
		)
		type ctxKey string
		const envKey ctxKey = "environment"

		type EnvConfig struct {
			Name string
		}

		// Reader that selects config based on context environment
		selectConfig := func(envName EnvConfig) reader.Reader[DetailedConfig] {
			return func(ctx context.Context) DetailedConfig {
				// Fall back to Dev when no environment is stored in the context.
				env := Dev
				if e, ok := ctx.Value(envKey).(Environment); ok {
					env = e
				}
				switch env {
				case Prod:
					return DetailedConfig{Host: "api.production.com", Port: 443}
				default:
					return DetailedConfig{Host: "localhost", Port: 8080}
				}
			}
		}

		// Use the selected config
		useConfig := func(cfg DetailedConfig) readerioresult.ReaderIOResult[string] {
			return func(ctx context.Context) ioresult.IOResult[string] {
				return func() result.Result[string] {
					return result.Of(fmt.Sprintf("Connecting to %s:%d", cfg.Host, cfg.Port))
				}
			}
		}

		adapted := LocalReaderK[string](selectConfig)(useConfig)

		// Production environment
		ctxProd := context.WithValue(ctx, envKey, Prod)
		resProd := adapted(EnvConfig{Name: "app"})(ctxProd)()
		assert.Equal(t, result.Of("Connecting to api.production.com:443"), resProd)

		// Development environment (default)
		resDev := adapted(EnvConfig{Name: "app"})(ctx)()
		assert.Equal(t, result.Of("Connecting to localhost:8080"), resDev)
	})
}

View File

@@ -19,6 +19,8 @@ import (
"context"
"github.com/IBM/fp-go/v2/function"
"github.com/IBM/fp-go/v2/pair"
RR "github.com/IBM/fp-go/v2/readerresult"
)
// Promap is the profunctor map operation that transforms both the input and output of a context-based ReaderResult.
@@ -34,21 +36,24 @@ import (
// The error type is fixed as error and remains unchanged through the transformation.
//
// Type Parameters:
// - R: The input environment type that f transforms into context.Context
// - A: The original success type produced by the ReaderResult
// - B: The new output success type
//
// Parameters:
// - f: Function to transform the input context (contravariant)
// - f: Function to transform the input environment R into context.Context (contravariant)
// - g: Function to transform the output success value from A to B (covariant)
//
// Returns:
// - An Operator that takes a ReaderResult[A] and returns a ReaderResult[B]
// - A Kleisli arrow that takes a ReaderResult[A] and returns a function from R to B
//
// Note: When R is context.Context, this simplifies to an Operator[A, B]
//
//go:inline
func Promap[A, B any](f func(context.Context) (context.Context, context.CancelFunc), g func(A) B) Operator[A, B] {
func Promap[R, A, B any](f pair.Kleisli[context.CancelFunc, R, context.Context], g func(A) B) RR.Kleisli[R, ReaderResult[A], B] {
return function.Flow2(
Local[A](f),
Map(g),
RR.Map[R](g),
)
}
@@ -62,15 +67,18 @@ func Promap[A, B any](f func(context.Context) (context.Context, context.CancelFu
//
// Type Parameters:
// - A: The success type (unchanged)
// - R: The input environment type that f transforms into context.Context
//
// Parameters:
// - f: Function to transform the context, returning a new context and CancelFunc
// - f: Function to transform the input environment R into context.Context, returning a new context and CancelFunc
//
// Returns:
// - An Operator that takes a ReaderResult[A] and returns a ReaderResult[A]
// - A Kleisli arrow that takes a ReaderResult[A] and returns a function from R to A
//
// Note: When R is context.Context, this simplifies to an Operator[A, A]
//
//go:inline
func Contramap[A any](f func(context.Context) (context.Context, context.CancelFunc)) Operator[A, A] {
func Contramap[A, R any](f pair.Kleisli[context.CancelFunc, R, context.Context]) RR.Kleisli[R, ReaderResult[A], A] {
return Local[A](f)
}
@@ -89,16 +97,19 @@ func Contramap[A any](f func(context.Context) (context.Context, context.CancelFu
//
// Type Parameters:
// - A: The result type (unchanged)
// - R: The input environment type that f transforms into context.Context
//
// Parameters:
// - f: Function to transform the context, returning a new context and CancelFunc
// - f: Function to transform the input environment R into context.Context, returning a new context and CancelFunc
//
// Returns:
// - An Operator that takes a ReaderResult[A] and returns a ReaderResult[A]
func Local[A any](f func(context.Context) (context.Context, context.CancelFunc)) Operator[A, A] {
return func(rr ReaderResult[A]) ReaderResult[A] {
return func(ctx context.Context) Result[A] {
otherCtx, otherCancel := f(ctx)
// - A Kleisli arrow that takes a ReaderResult[A] and returns a function from R to A
//
// Note: When R is context.Context, this simplifies to an Operator[A, A]
func Local[A, R any](f pair.Kleisli[context.CancelFunc, R, context.Context]) RR.Kleisli[R, ReaderResult[A], A] {
return func(rr ReaderResult[A]) RR.ReaderResult[R, A] {
return func(r R) Result[A] {
otherCancel, otherCtx := pair.Unpack(f(r))
defer otherCancel()
return rr(otherCtx)
}

View File

@@ -20,6 +20,7 @@ import (
"strconv"
"testing"
"github.com/IBM/fp-go/v2/pair"
R "github.com/IBM/fp-go/v2/result"
"github.com/stretchr/testify/assert"
)
@@ -34,9 +35,9 @@ func TestPromapBasic(t *testing.T) {
return R.Of(0)
}
addKey := func(ctx context.Context) (context.Context, context.CancelFunc) {
addKey := func(ctx context.Context) pair.Pair[context.CancelFunc, context.Context] {
newCtx := context.WithValue(ctx, "key", 42)
return newCtx, func() {}
return pair.MakePair[context.CancelFunc](func() {}, newCtx)
}
toString := strconv.Itoa
@@ -57,9 +58,9 @@ func TestContramapBasic(t *testing.T) {
return R.Of(0)
}
addKey := func(ctx context.Context) (context.Context, context.CancelFunc) {
addKey := func(ctx context.Context) pair.Pair[context.CancelFunc, context.Context] {
newCtx := context.WithValue(ctx, "key", 100)
return newCtx, func() {}
return pair.MakePair[context.CancelFunc](func() {}, newCtx)
}
adapted := Contramap[int](addKey)(getValue)
@@ -79,9 +80,9 @@ func TestLocalBasic(t *testing.T) {
return R.Of("unknown")
}
addUser := func(ctx context.Context) (context.Context, context.CancelFunc) {
addUser := func(ctx context.Context) pair.Pair[context.CancelFunc, context.Context] {
newCtx := context.WithValue(ctx, "user", "Alice")
return newCtx, func() {}
return pair.MakePair[context.CancelFunc](func() {}, newCtx)
}
adapted := Local[string](addUser)(getValue)

View File

@@ -23,6 +23,7 @@ import (
"github.com/IBM/fp-go/v2/internal/statet"
"github.com/IBM/fp-go/v2/pair"
"github.com/IBM/fp-go/v2/result"
SRIOE "github.com/IBM/fp-go/v2/statereaderioeither"
)
// Left creates a StateReaderIOResult that represents a failed computation with the given error.
@@ -215,24 +216,28 @@ func FromResult[S, A any](ma Result[A]) StateReaderIOResult[S, A] {
// Type Parameters:
// - S: The state type
// - A: The result type
// - R: The input environment type that f transforms into context.Context
//
// Parameters:
// - f: Function to transform the context, returning a new context and CancelFunc
// - f: Function to transform the input environment R into context.Context, returning a new context and CancelFunc
//
// Returns:
// - A function that takes a StateReaderIOResult[S, A] and returns a StateReaderIOResult[S, A]
// - A Kleisli arrow that takes a StateReaderIOResult[S, A] and returns a StateReaderIOEither[S, R, error, A]
//
// Note: When R is context.Context, the return type simplifies to func(StateReaderIOResult[S, A]) StateReaderIOResult[S, A]
//
// Example:
//
// // Add a timeout to a specific operation
// withTimeout := statereaderioresult.Local[AppState, Data](
// func(ctx context.Context) (context.Context, context.CancelFunc) {
// return context.WithTimeout(ctx, 60*time.Second)
// withTimeout := statereaderioresult.Local[AppState, Data, context.Context](
// func(ctx context.Context) pair.Pair[context.CancelFunc, context.Context] {
// newCtx, cancel := context.WithTimeout(ctx, 60*time.Second)
// return pair.MakePair(cancel, newCtx)
// },
// )
// result := withTimeout(computation)
func Local[S, A any](f pair.Kleisli[context.CancelFunc, context.Context, context.Context]) Operator[S, A, A] {
return func(ma StateReaderIOResult[S, A]) StateReaderIOResult[S, A] {
func Local[S, A, R any](f pair.Kleisli[context.CancelFunc, R, context.Context]) SRIOE.Kleisli[S, R, error, StateReaderIOResult[S, A], A] {
return func(ma StateReaderIOResult[S, A]) SRIOE.StateReaderIOEither[S, R, error, A] {
return function.Flow2(ma, RIORES.Local[Pair[S, A]](f))
}
}

View File

@@ -16,6 +16,7 @@
package effect
import (
"github.com/IBM/fp-go/v2/context/reader"
thunk "github.com/IBM/fp-go/v2/context/readerioresult"
"github.com/IBM/fp-go/v2/context/readerreaderioresult"
"github.com/IBM/fp-go/v2/io"
@@ -267,10 +268,89 @@ func LocalThunkK[A, C1, C2 any](f thunk.Kleisli[C2, C1]) func(Effect[C1, A]) Eff
// - Local/Contramap: Pure context transformation (C2 -> C1)
// - LocalIOK: IO-based transformation (C2 -> IO[C1])
// - LocalIOResultK: IO with error handling (C2 -> IOResult[C1])
// - LocalReaderIOResultK: Reader-based with IO and errors (C2 -> ReaderIOResult[C1])
// - LocalThunkK: Reader-based with IO and errors (C2 -> ReaderIOResult[C1])
// - LocalEffectK: Full Effect transformation (C2 -> Effect[C2, C1])
//
//go:inline
func LocalEffectK[A, C1, C2 any](f Kleisli[C2, C2, C1]) func(Effect[C1, A]) Effect[C2, A] {
	// Thin delegator: the transformation f is itself an Effect-style computation
	// that derives the inner context C1 from the outer context C2.
	return readerreaderioresult.LocalReaderReaderIOEitherK[A](f)
}
// LocalReaderK transforms the context of an Effect using a Reader-based Kleisli arrow.
// It allows you to modify the context through a pure computation that depends on the runtime context
// before passing it to the Effect.
//
// This is useful when the context transformation is a pure computation that requires access
// to the runtime context (context.Context) but cannot fail. Common use cases include:
// - Extracting configuration from context values
// - Computing derived context values based on runtime context
// - Transforming context based on runtime metadata
//
// The transformation happens in two stages:
// 1. The Reader function f is executed with the C2 context and runtime context to produce a C1 value
// 2. The resulting C1 value is passed as the context to the Effect[C1, A]
//
// # Type Parameters
//
// - A: The value type produced by the effect
// - C1: The inner context type (required by the original effect)
// - C2: The outer context type (provided to the transformed effect)
//
// # Parameters
//
// - f: A Reader Kleisli arrow that transforms C2 to C1 using the runtime context
//
// # Returns
//
// - func(Effect[C1, A]) Effect[C2, A]: A function that adapts the effect to use C2
//
// # Example
//
// type ctxKey string
// const configKey ctxKey = "config"
//
// type DetailedConfig struct {
// Host string
// Port int
// }
//
// type SimpleConfig struct {
// Port int
// }
//
// // Extract config from runtime context and transform
// extractConfig := func(path string) reader.Reader[DetailedConfig] {
// return func(ctx context.Context) DetailedConfig {
// if cfg, ok := ctx.Value(configKey).(DetailedConfig); ok {
// return cfg
// }
// return DetailedConfig{Host: "localhost", Port: 8080}
// }
// }
//
// // Effect that uses DetailedConfig
// configEffect := effect.Of[DetailedConfig]("connected")
//
// // Transform to use string path instead
// transform := effect.LocalReaderK[string](extractConfig)
// pathEffect := transform(configEffect)
//
// // Run with runtime context containing config
// ctx := context.WithValue(context.Background(), configKey, DetailedConfig{Host: "api.example.com", Port: 443})
// ioResult := effect.Provide[string]("config.json")(pathEffect)
// readerResult := effect.RunSync(ioResult)
// result, err := readerResult(ctx) // Uses config from context
//
// # Comparison with other Local functions
//
// - Local/Contramap: Pure context transformation (C2 -> C1)
// - LocalIOK: IO-based transformation (C2 -> IO[C1])
// - LocalIOResultK: IO with error handling (C2 -> IOResult[C1])
// - LocalReaderK: Reader-based pure transformation with runtime context access (C2 -> Reader[C1])
// - LocalThunkK: Reader-based with IO and errors (C2 -> ReaderIOResult[C1])
// - LocalEffectK: Full Effect transformation (C2 -> Effect[C2, C1])
//
//go:inline
func LocalReaderK[A, C1, C2 any](f reader.Kleisli[C2, C1]) func(Effect[C1, A]) Effect[C2, A] {
	// Thin delegator: the actual environment threading is implemented in the
	// readerreaderioresult package; only the value type A needs to be pinned here.
	return readerreaderioresult.LocalReaderK[A](f)
}

View File

@@ -19,7 +19,9 @@ import (
"context"
"fmt"
"testing"
"time"
"github.com/IBM/fp-go/v2/context/reader"
"github.com/IBM/fp-go/v2/context/readerreaderioresult"
"github.com/stretchr/testify/assert"
)
@@ -618,3 +620,347 @@ func TestLocalEffectK(t *testing.T) {
assert.Equal(t, 60, result) // 3 * 10 * 2
})
}
// TestLocalReaderK verifies that LocalReaderK adapts the context of an Effect
// through a pure, Reader-based transformation with access to the runtime
// context.Context. It covers basic adaptation, reading values from the runtime
// context, self-composition, composition with Local, error propagation from
// the wrapped Effect, and runtime-context deadline awareness.
func TestLocalReaderK(t *testing.T) {
	t.Run("basic Reader transformation", func(t *testing.T) {
		type SimpleConfig struct {
			Port int
		}
		// Reader that transforms string path to SimpleConfig using runtime context
		loadConfig := func(path string) reader.Reader[SimpleConfig] {
			return func(ctx context.Context) SimpleConfig {
				// Could extract values from runtime context here
				return SimpleConfig{Port: 8080}
			}
		}
		// Effect that uses the config
		configEffect := Of[SimpleConfig]("connected")
		// Transform using LocalReaderK
		transform := LocalReaderK[string](loadConfig)
		pathEffect := transform(configEffect)
		// Run with path
		ioResult := Provide[string]("config.json")(pathEffect)
		readerResult := RunSync(ioResult)
		result, err := readerResult(context.Background())
		assert.NoError(t, err)
		assert.Equal(t, "connected", result)
	})
	t.Run("extract config from runtime context", func(t *testing.T) {
		type ctxKey string
		const configKey ctxKey = "config"
		type DetailedConfig struct {
			Host string
			Port int
		}
		// Reader that extracts config from runtime context
		extractConfig := func(path string) reader.Reader[DetailedConfig] {
			return func(ctx context.Context) DetailedConfig {
				if cfg, ok := ctx.Value(configKey).(DetailedConfig); ok {
					return cfg
				}
				// Default config if not in runtime context
				return DetailedConfig{Host: "localhost", Port: 8080}
			}
		}
		// Effect that uses the config
		configEffect := Chain(func(cfg DetailedConfig) Effect[DetailedConfig, string] {
			return Of[DetailedConfig](fmt.Sprintf("%s:%d", cfg.Host, cfg.Port))
		})(readerreaderioresult.Ask[DetailedConfig]())
		transform := LocalReaderK[string](extractConfig)
		pathEffect := transform(configEffect)
		// With config in runtime context
		ctxWithConfig := context.WithValue(context.Background(), configKey, DetailedConfig{Host: "api.example.com", Port: 443})
		ioResult := Provide[string]("ignored")(pathEffect)
		readerResult := RunSync(ioResult)
		result, err := readerResult(ctxWithConfig)
		assert.NoError(t, err)
		assert.Equal(t, "api.example.com:443", result)
		// Without config in runtime context (uses default)
		ioResult2 := Provide[string]("ignored")(pathEffect)
		readerResult2 := RunSync(ioResult2)
		result2, err2 := readerResult2(context.Background())
		assert.NoError(t, err2)
		assert.Equal(t, "localhost:8080", result2)
	})
	t.Run("runtime context-aware transformation", func(t *testing.T) {
		type ctxKey string
		const multiplierKey ctxKey = "multiplier"
		// Reader that uses the runtime context to compute the derived context value
		computeValue := func(base int) reader.Reader[int] {
			return func(ctx context.Context) int {
				if mult, ok := ctx.Value(multiplierKey).(int); ok {
					return base * mult
				}
				return base
			}
		}
		// Effect that uses the computed value
		valueEffect := Chain(func(val int) Effect[int, string] {
			return Of[int](fmt.Sprintf("Value: %d", val))
		})(readerreaderioresult.Ask[int]())
		transform := LocalReaderK[string](computeValue)
		baseEffect := transform(valueEffect)
		// With multiplier in runtime context
		ctxWithMult := context.WithValue(context.Background(), multiplierKey, 10)
		ioResult := Provide[string](5)(baseEffect)
		readerResult := RunSync(ioResult)
		result, err := readerResult(ctxWithMult)
		assert.NoError(t, err)
		assert.Equal(t, "Value: 50", result)
		// Without multiplier (uses base value)
		ioResult2 := Provide[string](5)(baseEffect)
		readerResult2 := RunSync(ioResult2)
		result2, err2 := readerResult2(context.Background())
		assert.NoError(t, err2)
		assert.Equal(t, "Value: 5", result2)
	})
	t.Run("compose multiple LocalReaderK", func(t *testing.T) {
		type ctxKey string
		const prefixKey ctxKey = "prefix"
		// First transformation: int -> string using runtime context
		intToString := func(n int) reader.Reader[string] {
			return func(ctx context.Context) string {
				if prefix, ok := ctx.Value(prefixKey).(string); ok {
					return fmt.Sprintf("%s-%d", prefix, n)
				}
				return fmt.Sprintf("%d", n)
			}
		}
		// Second transformation: string -> SimpleConfig
		type SimpleConfig struct {
			Port int
		}
		stringToConfig := func(s string) reader.Reader[SimpleConfig] {
			return func(ctx context.Context) SimpleConfig {
				return SimpleConfig{Port: len(s) * 100}
			}
		}
		// Effect that uses the config
		configEffect := Chain(func(cfg SimpleConfig) Effect[SimpleConfig, string] {
			return Of[SimpleConfig](fmt.Sprintf("Port: %d", cfg.Port))
		})(readerreaderioresult.Ask[SimpleConfig]())
		// Compose transformations
		step1 := LocalReaderK[string](stringToConfig)
		step2 := LocalReaderK[string](intToString)
		effect1 := step1(configEffect)
		effect2 := step2(effect1)
		// With prefix in runtime context
		ctxWithPrefix := context.WithValue(context.Background(), prefixKey, "test")
		ioResult := Provide[string](42)(effect2)
		readerResult := RunSync(ioResult)
		result, err := readerResult(ctxWithPrefix)
		assert.NoError(t, err)
		// "test-42" has length 7, so port = 700
		assert.Equal(t, "Port: 700", result)
		// Without prefix
		ioResult2 := Provide[string](42)(effect2)
		readerResult2 := RunSync(ioResult2)
		result2, err2 := readerResult2(context.Background())
		assert.NoError(t, err2)
		// "42" has length 2, so port = 200
		assert.Equal(t, "Port: 200", result2)
	})
	t.Run("error propagation from Effect", func(t *testing.T) {
		type SimpleConfig struct {
			Port int
		}
		// Reader transformation (pure, cannot fail)
		loadConfig := func(path string) reader.Reader[SimpleConfig] {
			return func(ctx context.Context) SimpleConfig {
				return SimpleConfig{Port: 8080}
			}
		}
		// Effect that returns an error
		expectedErr := assert.AnError
		failingEffect := Fail[SimpleConfig, string](expectedErr)
		transform := LocalReaderK[string](loadConfig)
		pathEffect := transform(failingEffect)
		ioResult := Provide[string]("config.json")(pathEffect)
		readerResult := RunSync(ioResult)
		_, err := readerResult(context.Background())
		// Error from the Effect should propagate
		assert.Error(t, err)
		assert.Equal(t, expectedErr, err)
	})
	t.Run("real-world: environment selection based on runtime context", func(t *testing.T) {
		type Environment string
		const (
			Dev  Environment = "dev"
			Prod Environment = "prod"
		)
		type ctxKey string
		const envKey ctxKey = "environment"
		type EnvConfig struct {
			Name string
		}
		type DetailedConfig struct {
			Host string
			Port int
		}
		// Reader that selects config based on runtime context environment
		selectConfig := func(envName EnvConfig) reader.Reader[DetailedConfig] {
			return func(ctx context.Context) DetailedConfig {
				env := Dev
				if e, ok := ctx.Value(envKey).(Environment); ok {
					env = e
				}
				switch env {
				case Prod:
					return DetailedConfig{Host: "api.production.com", Port: 443}
				default:
					return DetailedConfig{Host: "localhost", Port: 8080}
				}
			}
		}
		// Effect that uses the selected config
		configEffect := Chain(func(cfg DetailedConfig) Effect[DetailedConfig, string] {
			return Of[DetailedConfig](fmt.Sprintf("Connecting to %s:%d", cfg.Host, cfg.Port))
		})(readerreaderioresult.Ask[DetailedConfig]())
		transform := LocalReaderK[string](selectConfig)
		envEffect := transform(configEffect)
		// Production environment
		ctxProd := context.WithValue(context.Background(), envKey, Prod)
		ioResult := Provide[string](EnvConfig{Name: "app"})(envEffect)
		readerResult := RunSync(ioResult)
		result, err := readerResult(ctxProd)
		assert.NoError(t, err)
		assert.Equal(t, "Connecting to api.production.com:443", result)
		// Development environment (default)
		ioResult2 := Provide[string](EnvConfig{Name: "app"})(envEffect)
		readerResult2 := RunSync(ioResult2)
		result2, err2 := readerResult2(context.Background())
		assert.NoError(t, err2)
		assert.Equal(t, "Connecting to localhost:8080", result2)
	})
	t.Run("composes with other Local functions", func(t *testing.T) {
		type Level1 struct {
			Value string
		}
		type Level2 struct {
			Data string
		}
		type Level3 struct {
			Info string
		}
		// Effect at deepest level
		effect3 := Of[Level3]("result")
		// Use LocalReaderK for first transformation (with runtime context access)
		localReaderK23 := LocalReaderK[string](func(l2 Level2) reader.Reader[Level3] {
			return func(ctx context.Context) Level3 {
				return Level3{Info: l2.Data}
			}
		})
		// Use Local for second transformation (pure)
		local12 := Local[string](func(l1 Level1) Level2 {
			return Level2{Data: l1.Value}
		})
		// Compose them
		effect2 := localReaderK23(effect3)
		effect1 := local12(effect2)
		// Run
		ioResult := Provide[string](Level1{Value: "test"})(effect1)
		readerResult := RunSync(ioResult)
		result, err := readerResult(context.Background())
		assert.NoError(t, err)
		assert.Equal(t, "result", result)
	})
	t.Run("runtime context deadline awareness", func(t *testing.T) {
		type Config struct {
			HasDeadline bool
		}
		// Reader that checks runtime context for deadline
		checkContext := func(path string) reader.Reader[Config] {
			return func(ctx context.Context) Config {
				_, hasDeadline := ctx.Deadline()
				return Config{HasDeadline: hasDeadline}
			}
		}
		// Effect that uses the config
		configEffect := Chain(func(cfg Config) Effect[Config, string] {
			return Of[Config](fmt.Sprintf("Has deadline: %v", cfg.HasDeadline))
		})(readerreaderioresult.Ask[Config]())
		transform := LocalReaderK[string](checkContext)
		pathEffect := transform(configEffect)
		// Without deadline
		ioResult := Provide[string]("config.json")(pathEffect)
		readerResult := RunSync(ioResult)
		result, err := readerResult(context.Background())
		assert.NoError(t, err)
		assert.Equal(t, "Has deadline: false", result)
		// With deadline
		ctxWithDeadline, cancel := context.WithTimeout(context.Background(), time.Second)
		defer cancel()
		ioResult2 := Provide[string]("config.json")(pathEffect)
		readerResult2 := RunSync(ioResult2)
		result2, err2 := readerResult2(ctxWithDeadline)
		assert.NoError(t, err2)
		assert.Equal(t, "Has deadline: true", result2)
	})
}

View File

@@ -40,7 +40,7 @@
// increment := N.Add(1)
//
// // Compose them (RIGHT-TO-LEFT execution)
// composed := endomorphism.Compose(double, increment)
// composed := endomorphism.MonadCompose(double, increment)
// result := composed(5) // increment(5) then double: (5 + 1) * 2 = 12
//
// // Chain them (LEFT-TO-RIGHT execution)
@@ -61,11 +61,11 @@
// monoid := endomorphism.Monoid[int]()
//
// // Combine multiple endomorphisms (RIGHT-TO-LEFT execution)
// combined := M.ConcatAll(monoid)(
// combined := M.ConcatAll(monoid)([]endomorphism.Endomorphism[int]{
// N.Mul(2), // applied third
// N.Add(1), // applied second
// N.Mul(3), // applied first
// )
// })
// result := combined(5) // (5 * 3) = 15, (15 + 1) = 16, (16 * 2) = 32
//
// # Monad Operations
@@ -87,7 +87,7 @@
// increment := N.Add(1)
//
// // Compose: RIGHT-TO-LEFT (mathematical composition)
// composed := endomorphism.Compose(double, increment)
// composed := endomorphism.MonadCompose(double, increment)
// result1 := composed(5) // increment(5) * 2 = (5 + 1) * 2 = 12
//
// // MonadChain: LEFT-TO-RIGHT (sequential application)

View File

@@ -111,15 +111,19 @@ func MonadCompose[A any](f, g Endomorphism[A]) Endomorphism[A] {
// This is the functor map operation for endomorphisms.
//
// IMPORTANT: Execution order is RIGHT-TO-LEFT:
// - g is applied first to the input
// - ma is applied first to the input
// - f is applied to the result
//
// Note: unlike most other packages where MonadMap takes (fa, f) with the container
// first, here f (the morphism) comes first to match the right-to-left composition
// convention: MonadMap(f, ma) = f ∘ ma.
//
// Parameters:
// - f: The function to map (outer function)
// - g: The endomorphism to map over (inner function)
// - f: The function to map (outer function, applied second)
// - ma: The endomorphism to map over (inner function, applied first)
//
// Returns:
// - A new endomorphism that applies g, then f
// - A new endomorphism that applies ma, then f
//
// Example:
//
@@ -127,8 +131,8 @@ func MonadCompose[A any](f, g Endomorphism[A]) Endomorphism[A] {
// increment := N.Add(1)
// mapped := endomorphism.MonadMap(double, increment)
// // mapped(5) = double(increment(5)) = double(6) = 12
func MonadMap[A any](f, g Endomorphism[A]) Endomorphism[A] {
return MonadCompose(f, g)
func MonadMap[A any](f, ma Endomorphism[A]) Endomorphism[A] {
return MonadCompose(f, ma)
}
// Compose returns a function that composes an endomorphism with another, executing right to left.
@@ -386,3 +390,91 @@ func Join[A any](f Kleisli[A]) Endomorphism[A] {
return f(a)(a)
}
}
// Read captures a value and returns a function that applies endomorphisms to it.
//
// This function implements a "reader" pattern for endomorphisms. It takes a value
// and returns a function that can apply any endomorphism to that captured value.
// This is useful for creating reusable evaluation contexts where you want to apply
// different transformations to the same initial value.
//
// The returned function has the signature func(Endomorphism[A]) A, which means
// it takes an endomorphism and returns the result of applying that endomorphism
// to the captured value.
//
// # Type Parameters
//
// - A: The type of the value being captured and transformed
//
// # Parameters
//
// - a: The value to capture for later transformation
//
// # Returns
//
// - A function that applies endomorphisms to the captured value
//
// # Example - Basic Usage
//
// // Capture a value
// applyTo5 := Read(5)
//
// // Apply different endomorphisms to the same value
// doubled := applyTo5(N.Mul(2)) // 10
// incremented := applyTo5(N.Add(1)) // 6
// squared := applyTo5(func(x int) int { return x * x }) // 25
//
// # Example - Reusable Evaluation Context
//
// type Config struct {
// Timeout int
// Retries int
// }
//
// baseConfig := Config{Timeout: 30, Retries: 3}
// applyToBase := Read(baseConfig)
//
// // Apply different transformations to the same base config
// withLongTimeout := applyToBase(func(c Config) Config {
// c.Timeout = 60
// return c
// })
//
// withMoreRetries := applyToBase(func(c Config) Config {
// c.Retries = 5
// return c
// })
//
// # Example - Testing Different Transformations
//
// // Useful for testing multiple transformations on the same input
// testValue := "hello"
// applyToTest := Read(testValue)
//
// upperCase := applyToTest(strings.ToUpper) // "HELLO"
// withSuffix := applyToTest(func(s string) string {
// return s + " world"
// }) // "hello world"
//
// # Use Cases
//
// 1. **Testing**: Apply multiple transformations to the same test value
// 2. **Configuration**: Create variations of a base configuration
// 3. **Data Processing**: Evaluate different processing pipelines on the same data
// 4. **Benchmarking**: Compare different endomorphisms on the same input
// 5. **Functional Composition**: Build evaluation contexts for composed operations
//
// # Relationship to Other Functions
//
// Read is complementary to other endomorphism operations:
// - Build applies an endomorphism to the zero value
// - Read applies endomorphisms to a specific captured value
// - Reduce applies multiple endomorphisms sequentially
// - ConcatAll composes multiple endomorphisms
//
//go:inline
func Read[A any](a A) func(Endomorphism[A]) A {
	// Capture a once; the returned evaluator applies any endomorphism to it.
	return func(endo Endomorphism[A]) A {
		return endo(a)
	}
}

View File

@@ -1071,3 +1071,226 @@ func TestReduceWithBuild(t *testing.T) {
assert.NotEqual(t, reduceResult, buildResult, "Reduce and Build(ConcatAll) produce different results due to execution order")
}
// TestRead verifies that Read captures a value and returns an evaluator that
// applies arbitrary endomorphisms to that captured value: single and reused
// applications, identity, composed/chained/concatenated endomorphisms,
// non-int types, independent contexts, and monoid-based combination.
func TestRead(t *testing.T) {
	t.Run("applies endomorphism to captured value", func(t *testing.T) {
		applyTo5 := Read(5)
		result := applyTo5(double)
		assert.Equal(t, 10, result, "Read should apply double to captured value 5")
		result2 := applyTo5(increment)
		assert.Equal(t, 6, result2, "Read should apply increment to captured value 5")
		result3 := applyTo5(square)
		assert.Equal(t, 25, result3, "Read should apply square to captured value 5")
	})
	t.Run("captures value for reuse", func(t *testing.T) {
		applyTo10 := Read(10)
		// Apply multiple different endomorphisms to the same captured value
		doubled := applyTo10(double)
		incremented := applyTo10(increment)
		negated := applyTo10(negate)
		assert.Equal(t, 20, doubled, "Should double 10")
		assert.Equal(t, 11, incremented, "Should increment 10")
		assert.Equal(t, -10, negated, "Should negate 10")
	})
	t.Run("works with identity", func(t *testing.T) {
		applyTo42 := Read(42)
		result := applyTo42(Identity[int]())
		assert.Equal(t, 42, result, "Read with identity should return original value")
	})
	t.Run("works with composed endomorphisms", func(t *testing.T) {
		applyTo5 := Read(5)
		// Compose: double then increment (RIGHT-TO-LEFT)
		composed := MonadCompose(increment, double)
		result := applyTo5(composed)
		assert.Equal(t, 11, result, "Read should work with composed endomorphisms: (5 * 2) + 1 = 11")
	})
	t.Run("works with chained endomorphisms", func(t *testing.T) {
		applyTo5 := Read(5)
		// Chain: double then increment (LEFT-TO-RIGHT)
		chained := MonadChain(double, increment)
		result := applyTo5(chained)
		assert.Equal(t, 11, result, "Read should work with chained endomorphisms: (5 * 2) + 1 = 11")
	})
	t.Run("works with ConcatAll", func(t *testing.T) {
		applyTo5 := Read(5)
		// ConcatAll composes RIGHT-TO-LEFT
		combined := ConcatAll([]Endomorphism[int]{double, increment, square})
		result := applyTo5(combined)
		// Execution: square(5) = 25, increment(25) = 26, double(26) = 52
		assert.Equal(t, 52, result, "Read should work with ConcatAll")
	})
	t.Run("works with different types", func(t *testing.T) {
		// Test with string
		applyToHello := Read("hello")
		toUpper := func(s string) string { return s + " WORLD" }
		result := applyToHello(toUpper)
		assert.Equal(t, "hello WORLD", result, "Read should work with strings")
		// Test with struct
		type Point struct {
			X, Y int
		}
		applyToPoint := Read(Point{X: 3, Y: 4})
		scaleX := func(p Point) Point {
			p.X *= 2
			return p
		}
		result2 := applyToPoint(scaleX)
		assert.Equal(t, Point{X: 6, Y: 4}, result2, "Read should work with structs")
	})
	t.Run("creates independent evaluation contexts", func(t *testing.T) {
		applyTo5 := Read(5)
		applyTo10 := Read(10)
		// Same endomorphism, different contexts
		result5 := applyTo5(double)
		result10 := applyTo10(double)
		assert.Equal(t, 10, result5, "First context should double 5")
		assert.Equal(t, 20, result10, "Second context should double 10")
	})
	t.Run("useful for testing transformations", func(t *testing.T) {
		testValue := 100
		applyToTest := Read(testValue)
		// Test multiple transformations on the same value
		transformations := []struct {
			name     string
			endo     Endomorphism[int]
			expected int
		}{
			{"double", double, 200},
			{"increment", increment, 101},
			{"negate", negate, -100},
			{"square", square, 10000},
		}
		for _, tt := range transformations {
			t.Run(tt.name, func(t *testing.T) {
				result := applyToTest(tt.endo)
				assert.Equal(t, tt.expected, result)
			})
		}
	})
	t.Run("works with monoid operations", func(t *testing.T) {
		applyTo5 := Read(5)
		// Use monoid to combine endomorphisms
		combined := M.ConcatAll(Monoid[int]())([]Endomorphism[int]{
			double,
			increment,
		})
		result := applyTo5(combined)
		// RIGHT-TO-LEFT: increment(5) = 6, double(6) = 12
		assert.Equal(t, 12, result, "Read should work with monoid operations")
	})
	t.Run("configuration example", func(t *testing.T) {
		type Config struct {
			Timeout int
			Retries int
		}
		baseConfig := Config{Timeout: 30, Retries: 3}
		applyToBase := Read(baseConfig)
		withLongTimeout := func(c Config) Config {
			c.Timeout = 60
			return c
		}
		withMoreRetries := func(c Config) Config {
			c.Retries = 5
			return c
		}
		result1 := applyToBase(withLongTimeout)
		assert.Equal(t, Config{Timeout: 60, Retries: 3}, result1)
		result2 := applyToBase(withMoreRetries)
		assert.Equal(t, Config{Timeout: 30, Retries: 5}, result2)
		// Original is unchanged (Config is a value type, so the captured copy is never mutated)
		result3 := applyToBase(Identity[Config]())
		assert.Equal(t, baseConfig, result3)
	})
}
// TestReadWithBuild documents the relationship between Read and Build:
// Build applies an endomorphism to the zero value of A, while Read applies
// it to an explicitly captured value.
func TestReadWithBuild(t *testing.T) {
	t.Run("Read applies to specific value, Build to zero value", func(t *testing.T) {
		endo := double
		// Build applies to zero value
		builtResult := Build(endo)
		assert.Equal(t, 0, builtResult, "Build should apply to zero value: 0 * 2 = 0")
		// Read applies to specific value
		readResult := Read(5)(endo)
		assert.Equal(t, 10, readResult, "Read should apply to captured value: 5 * 2 = 10")
	})
	t.Run("Read can evaluate Build results", func(t *testing.T) {
		// Build an endomorphism
		builder := ConcatAll([]Endomorphism[int]{double, increment})
		// Apply it to zero value
		builtValue := Build(builder)
		// RIGHT-TO-LEFT: increment(0) = 1, double(1) = 2
		assert.Equal(t, 2, builtValue)
		// Now use Read to apply the same builder to a different value
		readValue := Read(5)(builder)
		// RIGHT-TO-LEFT: increment(5) = 6, double(6) = 12
		assert.Equal(t, 12, readValue)
	})
}
// BenchmarkRead benchmarks the Read function against endomorphisms of
// increasing construction complexity. Composed endomorphisms are built
// before the timed loop and the timer is reset so that only the cost of
// applying the endomorphism via Read is measured, not its construction.
func BenchmarkRead(b *testing.B) {
	applyTo5 := Read(5)
	b.Run("simple endomorphism", func(b *testing.B) {
		for i := 0; i < b.N; i++ {
			_ = applyTo5(double)
		}
	})
	b.Run("composed endomorphism", func(b *testing.B) {
		composed := MonadCompose(double, increment)
		// exclude the composition setup from the measurement
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_ = applyTo5(composed)
		}
	})
	b.Run("ConcatAll endomorphism", func(b *testing.B) {
		combined := ConcatAll([]Endomorphism[int]{double, increment, square})
		// exclude the fold over the slice of endomorphisms from the measurement
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_ = applyTo5(combined)
		}
	})
}

View File

@@ -144,8 +144,8 @@ func Semigroup[A any]() S.Semigroup[Endomorphism[A]] {
// square := func(x int) int { return x * x }
//
// // Combine multiple endomorphisms (RIGHT-TO-LEFT execution)
// combined := M.ConcatAll(monoid)(double, increment, square)
// result := combined(5) // square(increment(double(5))) = square(increment(10)) = square(11) = 121
// combined := M.ConcatAll(monoid)([]Endomorphism[int]{double, increment, square})
// result := combined(5) // double(increment(square(5))) = double(increment(25)) = double(26) = 52
func Monoid[A any]() M.Monoid[Endomorphism[A]] {
return M.MakeMonoid(MonadCompose[A], Identity[A]())
}

View File

@@ -41,20 +41,22 @@ type (
// It's a function from A to Endomorphism[A], used for composing endomorphic operations.
Kleisli[A any] = func(A) Endomorphism[A]
// Operator represents a transformation from one endomorphism to another.
// Operator represents a higher-order transformation on endomorphisms of the same type.
//
// An Operator takes an endomorphism on type A and produces an endomorphism on type B.
// This is useful for lifting operations or transforming endomorphisms in a generic way.
// An Operator takes an endomorphism on type A and produces another endomorphism on type A.
// Since Operator[A] = Endomorphism[Endomorphism[A]] = func(func(A)A) func(A)A,
// both the input and output endomorphisms operate on the same type A.
//
// This is the return type of curried operations such as Compose, Map, and Chain.
//
// Example:
//
// // An operator that converts an int endomorphism to a string endomorphism
// intToString := func(f endomorphism.Endomorphism[int]) endomorphism.Endomorphism[string] {
// return func(s string) string {
// n, _ := strconv.Atoi(s)
// result := f(n)
// return strconv.Itoa(result)
// }
// // An operator that applies any endomorphism twice
// var applyTwice endomorphism.Operator[int] = func(f endomorphism.Endomorphism[int]) endomorphism.Endomorphism[int] {
// return func(x int) int { return f(f(x)) }
// }
// double := N.Mul(2)
// result := applyTwice(double) // double ∘ double
// // result(5) = double(double(5)) = double(10) = 20
Operator[A any] = Endomorphism[Endomorphism[A]]
)

View File

@@ -61,6 +61,18 @@ func LocalReaderIOEitherK[A, C, E, R1, R2 any](f readerioeither.Kleisli[C, E, R2
}
}
// LocalReaderK adapts the outer environment of a ReaderReaderIOEither using a
// Reader-based Kleisli arrow f: the incoming environment R2 is piped through f,
// the resulting reader is lifted via readerioeither.FromReader, the wrapped
// computation is mapped over it, and the nested structure is flattened.
// NOTE(review): exact Kleisli parameter roles (C vs R2/R1) follow the reader
// package's convention — confirm against reader.Kleisli's declaration.
//
//go:inline
func LocalReaderK[E, A, C, R1, R2 any](f reader.Kleisli[C, R2, R1]) func(ReaderReaderIOEither[R1, C, E, A]) ReaderReaderIOEither[R2, C, E, A] {
	return func(rri ReaderReaderIOEither[R1, C, E, A]) ReaderReaderIOEither[R2, C, E, A] {
		return F.Flow4(
			f,
			readerioeither.FromReader,
			readerioeither.Map[C, E](rri),
			readerioeither.Flatten,
		)
	}
}
//go:inline
func LocalReaderReaderIOEitherK[A, C, E, R1, R2 any](f Kleisli[R2, C, E, R2, R1]) func(ReaderReaderIOEither[R1, C, E, A]) ReaderReaderIOEither[R2, C, E, A] {
return func(rri ReaderReaderIOEither[R1, C, E, A]) ReaderReaderIOEither[R2, C, E, A] {

View File

@@ -38,21 +38,41 @@ func IsNonEmpty[M ~map[K]V, K comparable, V any](r M) bool {
}
// Keys returns the keys of the map r as a slice of type GK.
// The iteration order is non-deterministic; for an empty (or nil) map the
// result is nil, which is a valid representation of an empty slice in Go.
func Keys[M ~map[K]V, GK ~[]K, K comparable, V any](r M) GK {
	// fast path: avoid any allocation for an empty map
	if len(r) == 0 {
		return nil
	}
	// full implementation
	return collect[M, GK](r, F.First[K, V])
}
// Values returns the values of the map r as a slice of type GV.
// The iteration order is non-deterministic; for an empty (or nil) map the
// result is nil, which is a valid representation of an empty slice in Go.
func Values[M ~map[K]V, GV ~[]V, K comparable, V any](r M) GV {
	// fast path: avoid any allocation for an empty map
	if len(r) == 0 {
		return nil
	}
	// full implementation
	return collect[M, GV](r, F.Second[K, V])
}
// KeysOrd returns the keys of the map r as a slice of type GK, in the key
// order defined by the Ord instance o (see collectOrd). For an empty (or nil)
// map the result is nil.
func KeysOrd[M ~map[K]V, GK ~[]K, K comparable, V any](o ord.Ord[K]) func(r M) GK {
	return func(r M) GK {
		// fast path: avoid any allocation for an empty map
		if len(r) == 0 {
			return nil
		}
		// full implementation
		return collectOrd[M, GK](o, r, F.First[K, V])
	}
}
// ValuesOrd returns the values of the map r as a slice of type GV, ordered by
// their keys according to the Ord instance o (see collectOrd). For an empty
// (or nil) map the result is nil.
func ValuesOrd[M ~map[K]V, GV ~[]V, K comparable, V any](o ord.Ord[K]) func(r M) GV {
	return func(r M) GV {
		// fast path: avoid any allocation for an empty map
		if len(r) == 0 {
			return nil
		}
		// full implementation
		return collectOrd[M, GV](o, r, F.Second[K, V])
	}
}
@@ -97,12 +117,18 @@ func collect[M ~map[K]V, GR ~[]R, K comparable, V, R any](r M, f func(K, V) R) G
}
func Collect[M ~map[K]V, GR ~[]R, K comparable, V, R any](f func(K, V) R) func(M) GR {
// full implementation
return F.Bind2nd(collect[M, GR, K, V, R], f)
}
func CollectOrd[M ~map[K]V, GR ~[]R, K comparable, V, R any](o ord.Ord[K]) func(f func(K, V) R) func(M) GR {
return func(f func(K, V) R) func(M) GR {
return func(r M) GR {
// fast path
if len(r) == 0 {
return nil
}
// full implementation
return collectOrd[M, GR](o, r, f)
}
}
@@ -416,12 +442,22 @@ func duplicate[M ~map[K]V, K comparable, V any](r M) M {
}
// upsertAt returns a copy of r with the key k set to value v.
// The input map r is never mutated; callers always receive a fresh map.
func upsertAt[M ~map[K]V, K comparable, V any](r M, k K, v V) M {
	// fast path: an empty input needs no copy, a fresh singleton suffices
	if len(r) == 0 {
		return Singleton[M](k, v)
	}
	// duplicate and update
	dup := duplicate(r)
	dup[k] = v
	return dup
}
func deleteAt[M ~map[K]V, K comparable, V any](r M, k K) M {
// fast path
if len(r) == 0 {
return r
}
// duplicate and update
dup := duplicate(r)
delete(dup, k)
return dup

View File

@@ -55,10 +55,16 @@ func IsNonEmpty[K comparable, V any](r Record[K, V]) bool {
// The order of keys is non-deterministic due to Go's map iteration behavior.
// Use KeysOrd if you need keys in a specific order.
//
// Note: The return value can be nil in case of an empty map, since nil is a
// valid representation of an empty slice in Go.
//
// Example:
//
// record := Record[string, int]{"a": 1, "b": 2, "c": 3}
// keys := Keys(record) // ["a", "b", "c"] in any order
//
// emptyRecord := Record[string, int]{}
// emptyKeys := Keys(emptyRecord) // nil or []string{}
func Keys[K comparable, V any](r Record[K, V]) []K {
return G.Keys[Record[K, V], []K](r)
}
@@ -68,10 +74,16 @@ func Keys[K comparable, V any](r Record[K, V]) []K {
// The order of values is non-deterministic due to Go's map iteration behavior.
// Use ValuesOrd if you need values ordered by their keys.
//
// Note: The return value can be nil in case of an empty map, since nil is a
// valid representation of an empty slice in Go.
//
// Example:
//
// record := Record[string, int]{"a": 1, "b": 2, "c": 3}
// values := Values(record) // [1, 2, 3] in any order
//
// emptyRecord := Record[string, int]{}
// emptyValues := Values(emptyRecord) // nil or []int{}
func Values[K comparable, V any](r Record[K, V]) []V {
return G.Values[Record[K, V], []V](r)
}
@@ -98,6 +110,9 @@ func Collect[K comparable, V, R any](f func(K, V) R) func(Record[K, V]) []R {
//
// Unlike Collect, this function guarantees the order of results based on key ordering.
//
// Note: The return value can be nil in case of an empty map, since nil is a
// valid representation of an empty slice in Go.
//
// Example:
//
// record := Record[string, int]{"c": 3, "a": 1, "b": 2}
@@ -105,6 +120,9 @@ func Collect[K comparable, V, R any](f func(K, V) R) func(Record[K, V]) []R {
// return fmt.Sprintf("%s=%d", k, v)
// })
// result := toStrings(record) // ["a=1", "b=2", "c=3"] (ordered by key)
//
// emptyRecord := Record[string, int]{}
// emptyResult := toStrings(emptyRecord) // nil or []string{}
func CollectOrd[V, R any, K comparable](o ord.Ord[K]) func(func(K, V) R) func(Record[K, V]) []R {
	// Delegates to the generic implementation, using o to produce a deterministic key order.
	return G.CollectOrd[Record[K, V], []R](o)
}
@@ -458,11 +476,18 @@ func UpsertAt[K comparable, V any](k K, v V) Operator[K, V, V] {
// If the key doesn't exist, the record is returned unchanged.
// The original record is not modified; a new record is returned.
//
// In case of an empty input map (including nil maps), the identical map is returned,
// since deleting from an empty map is an idempotent operation.
//
// Example:
//
// record := Record[string, int]{"a": 1, "b": 2, "c": 3}
// removeB := DeleteAt[string, int]("b")
// result := removeB(record) // {"a": 1, "c": 3}
//
// // Deleting from empty map returns empty map
// emptyRecord := Record[string, int]{}
// result2 := removeB(emptyRecord) // {}
func DeleteAt[K comparable, V any](k K) Operator[K, V, V] {
	// Delegates to the generic implementation; deleting from an empty or nil map
	// returns the input map unchanged (idempotent, see doc above).
	return G.DeleteAt[Record[K, V]](k)
}

View File

@@ -0,0 +1,552 @@
// Copyright (c) 2023 - 2025 IBM Corp.
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package record
import (
"fmt"
"testing"
O "github.com/IBM/fp-go/v2/option"
P "github.com/IBM/fp-go/v2/pair"
SG "github.com/IBM/fp-go/v2/semigroup"
S "github.com/IBM/fp-go/v2/string"
"github.com/stretchr/testify/assert"
)
// TestNilMap_IsEmpty verifies that IsEmpty handles nil maps correctly.
func TestNilMap_IsEmpty(t *testing.T) {
	var m Record[string, int]
	assert.True(t, IsEmpty(m), "nil map should be empty")
}

// TestNilMap_IsNonEmpty verifies that IsNonEmpty handles nil maps correctly.
func TestNilMap_IsNonEmpty(t *testing.T) {
	var m Record[string, int]
	assert.False(t, IsNonEmpty(m), "nil map should not be non-empty")
}

// TestNilMap_Keys verifies that Keys handles nil maps correctly.
func TestNilMap_Keys(t *testing.T) {
	var m Record[string, int]
	// Keys may return nil for an empty map; nil is a valid empty slice.
	assert.Len(t, Keys(m), 0, "Keys should return empty slice for nil map")
}

// TestNilMap_Values verifies that Values handles nil maps correctly.
func TestNilMap_Values(t *testing.T) {
	var m Record[string, int]
	// Values may return nil for an empty map; nil is a valid empty slice.
	assert.Len(t, Values(m), 0, "Values should return empty slice for nil map")
}
// TestNilMap_Collect verifies that Collect handles nil maps correctly.
func TestNilMap_Collect(t *testing.T) {
	var m Record[string, int]
	toString := Collect(func(k string, v int) string {
		return fmt.Sprintf("%s=%d", k, v)
	})
	got := toString(m)
	assert.NotNil(t, got, "Collect should return non-nil slice")
	assert.Len(t, got, 0, "Collect should return empty slice for nil map")
}

// TestNilMap_Reduce verifies that Reduce handles nil maps correctly.
func TestNilMap_Reduce(t *testing.T) {
	var m Record[string, int]
	sum := Reduce[string](func(acc int, v int) int { return acc + v }, 10)
	assert.Equal(t, 10, sum(m), "Reduce should return initial value for nil map")
}

// TestNilMap_ReduceWithIndex verifies that ReduceWithIndex handles nil maps correctly.
func TestNilMap_ReduceWithIndex(t *testing.T) {
	var m Record[string, int]
	sum := ReduceWithIndex(func(k string, acc int, v int) int { return acc + v }, 10)
	assert.Equal(t, 10, sum(m), "ReduceWithIndex should return initial value for nil map")
}

// TestNilMap_ReduceRef verifies that ReduceRef handles nil maps correctly.
func TestNilMap_ReduceRef(t *testing.T) {
	var m Record[string, int]
	sum := ReduceRef[string](func(acc int, v *int) int { return acc + *v }, 10)
	assert.Equal(t, 10, sum(m), "ReduceRef should return initial value for nil map")
}

// TestNilMap_ReduceRefWithIndex verifies that ReduceRefWithIndex handles nil maps correctly.
func TestNilMap_ReduceRefWithIndex(t *testing.T) {
	var m Record[string, int]
	sum := ReduceRefWithIndex(func(k string, acc int, v *int) int { return acc + *v }, 10)
	assert.Equal(t, 10, sum(m), "ReduceRefWithIndex should return initial value for nil map")
}
// TestNilMap_MonadMap verifies that MonadMap handles nil maps correctly.
func TestNilMap_MonadMap(t *testing.T) {
	var m Record[string, int]
	got := MonadMap(m, func(v int) string { return fmt.Sprintf("%d", v) })
	assert.NotNil(t, got, "MonadMap should return non-nil map")
	assert.Empty(t, got, "MonadMap should return empty map for nil input")
}

// TestNilMap_MonadMapWithIndex verifies that MonadMapWithIndex handles nil maps correctly.
func TestNilMap_MonadMapWithIndex(t *testing.T) {
	var m Record[string, int]
	got := MonadMapWithIndex(m, func(k string, v int) string { return fmt.Sprintf("%s=%d", k, v) })
	assert.NotNil(t, got, "MonadMapWithIndex should return non-nil map")
	assert.Empty(t, got, "MonadMapWithIndex should return empty map for nil input")
}

// TestNilMap_MonadMapRefWithIndex verifies that MonadMapRefWithIndex handles nil maps correctly.
func TestNilMap_MonadMapRefWithIndex(t *testing.T) {
	var m Record[string, int]
	got := MonadMapRefWithIndex(m, func(k string, v *int) string { return fmt.Sprintf("%s=%d", k, *v) })
	assert.NotNil(t, got, "MonadMapRefWithIndex should return non-nil map")
	assert.Empty(t, got, "MonadMapRefWithIndex should return empty map for nil input")
}

// TestNilMap_MonadMapRef verifies that MonadMapRef handles nil maps correctly.
func TestNilMap_MonadMapRef(t *testing.T) {
	var m Record[string, int]
	got := MonadMapRef(m, func(v *int) string { return fmt.Sprintf("%d", *v) })
	assert.NotNil(t, got, "MonadMapRef should return non-nil map")
	assert.Empty(t, got, "MonadMapRef should return empty map for nil input")
}

// TestNilMap_Map verifies that Map handles nil maps correctly.
func TestNilMap_Map(t *testing.T) {
	var m Record[string, int]
	toString := Map[string](func(v int) string { return fmt.Sprintf("%d", v) })
	got := toString(m)
	assert.NotNil(t, got, "Map should return non-nil map")
	assert.Empty(t, got, "Map should return empty map for nil input")
}

// TestNilMap_MapRef verifies that MapRef handles nil maps correctly.
func TestNilMap_MapRef(t *testing.T) {
	var m Record[string, int]
	toString := MapRef[string](func(v *int) string { return fmt.Sprintf("%d", *v) })
	got := toString(m)
	assert.NotNil(t, got, "MapRef should return non-nil map")
	assert.Empty(t, got, "MapRef should return empty map for nil input")
}

// TestNilMap_MapWithIndex verifies that MapWithIndex handles nil maps correctly.
func TestNilMap_MapWithIndex(t *testing.T) {
	var m Record[string, int]
	toString := MapWithIndex[string](func(k string, v int) string { return fmt.Sprintf("%s=%d", k, v) })
	got := toString(m)
	assert.NotNil(t, got, "MapWithIndex should return non-nil map")
	assert.Empty(t, got, "MapWithIndex should return empty map for nil input")
}

// TestNilMap_MapRefWithIndex verifies that MapRefWithIndex handles nil maps correctly.
func TestNilMap_MapRefWithIndex(t *testing.T) {
	var m Record[string, int]
	toString := MapRefWithIndex[string](func(k string, v *int) string { return fmt.Sprintf("%s=%d", k, *v) })
	got := toString(m)
	assert.NotNil(t, got, "MapRefWithIndex should return non-nil map")
	assert.Empty(t, got, "MapRefWithIndex should return empty map for nil input")
}
// TestNilMap_Lookup verifies that Lookup handles nil maps correctly.
func TestNilMap_Lookup(t *testing.T) {
	var m Record[string, int]
	got := Lookup[int]("key")(m)
	assert.True(t, O.IsNone(got), "Lookup should return None for nil map")
}

// TestNilMap_MonadLookup verifies that MonadLookup handles nil maps correctly.
func TestNilMap_MonadLookup(t *testing.T) {
	var m Record[string, int]
	assert.True(t, O.IsNone(MonadLookup(m, "key")), "MonadLookup should return None for nil map")
}

// TestNilMap_Has verifies that Has handles nil maps correctly.
func TestNilMap_Has(t *testing.T) {
	var m Record[string, int]
	assert.False(t, Has("key", m), "Has should return false for nil map")
}
// TestNilMap_Union verifies that Union handles nil maps correctly.
func TestNilMap_Union(t *testing.T) {
	var empty Record[string, int]
	filled := Record[string, int]{"a": 1, "b": 2}
	combine := Union[string](SG.Last[int]())
	// nil union non-nil
	assert.Equal(t, filled, combine(filled)(empty), "nil union non-nil should return non-nil map")
	// non-nil union nil
	assert.Equal(t, filled, combine(empty)(filled), "non-nil union nil should return non-nil map")
	// nil union nil - returns nil when both inputs are nil (optimization)
	assert.Nil(t, combine(empty)(empty), "nil union nil returns nil")
}

// TestNilMap_Merge verifies that Merge handles nil maps correctly.
func TestNilMap_Merge(t *testing.T) {
	var empty Record[string, int]
	filled := Record[string, int]{"a": 1, "b": 2}
	// nil merge non-nil
	assert.Equal(t, filled, Merge(filled)(empty), "nil merge non-nil should return non-nil map")
	// non-nil merge nil
	assert.Equal(t, filled, Merge(empty)(filled), "non-nil merge nil should return non-nil map")
	// nil merge nil - returns nil when both inputs are nil (optimization)
	assert.Nil(t, Merge(empty)(empty), "nil merge nil returns nil")
}
// TestNilMap_Size verifies that Size handles nil maps correctly.
func TestNilMap_Size(t *testing.T) {
	var m Record[string, int]
	assert.Equal(t, 0, Size(m), "Size should return 0 for nil map")
}

// TestNilMap_ToArray verifies that ToArray handles nil maps correctly.
func TestNilMap_ToArray(t *testing.T) {
	var m Record[string, int]
	got := ToArray(m)
	assert.NotNil(t, got, "ToArray should return non-nil slice")
	assert.Len(t, got, 0, "ToArray should return empty slice for nil map")
}

// TestNilMap_ToEntries verifies that ToEntries handles nil maps correctly.
func TestNilMap_ToEntries(t *testing.T) {
	var m Record[string, int]
	got := ToEntries(m)
	assert.NotNil(t, got, "ToEntries should return non-nil slice")
	assert.Len(t, got, 0, "ToEntries should return empty slice for nil map")
}

// TestNilMap_UpsertAt verifies that UpsertAt handles nil maps correctly.
func TestNilMap_UpsertAt(t *testing.T) {
	var m Record[string, int]
	got := UpsertAt("key", 42)(m)
	assert.NotNil(t, got, "UpsertAt should return non-nil map")
	assert.Len(t, got, 1, "UpsertAt should create map with one entry")
	assert.Equal(t, 42, got["key"], "UpsertAt should insert value correctly")
}

// TestNilMap_DeleteAt verifies that DeleteAt handles nil maps correctly.
func TestNilMap_DeleteAt(t *testing.T) {
	var m Record[string, int]
	remove := DeleteAt[string, int]("key")
	got := remove(m)
	// DeleteAt returns the identical map for nil input (idempotent operation)
	assert.Nil(t, got, "DeleteAt should return nil for nil input (idempotent)")
	assert.Equal(t, m, got, "DeleteAt should return identical map for nil input")
	// Verify that deleting from empty (non-nil) map returns identical map (idempotent)
	empty := Record[string, int]{}
	got2 := remove(empty)
	assert.NotNil(t, got2, "DeleteAt should return non-nil map for empty input")
	assert.Len(t, got2, 0, "DeleteAt should return empty map for empty input")
	assert.Equal(t, empty, got2, "DeleteAt on empty map should be idempotent")
}
// TestNilMap_Filter verifies that Filter handles nil maps correctly.
func TestNilMap_Filter(t *testing.T) {
	var m Record[string, int]
	keepAll := Filter[string, int](func(k string) bool { return true })
	got := keepAll(m)
	assert.NotNil(t, got, "Filter should return non-nil map")
	assert.Empty(t, got, "Filter should return empty map for nil input")
}

// TestNilMap_FilterWithIndex verifies that FilterWithIndex handles nil maps correctly.
func TestNilMap_FilterWithIndex(t *testing.T) {
	var m Record[string, int]
	keepAll := FilterWithIndex[string, int](func(k string, v int) bool { return true })
	got := keepAll(m)
	assert.NotNil(t, got, "FilterWithIndex should return non-nil map")
	assert.Empty(t, got, "FilterWithIndex should return empty map for nil input")
}

// TestNilMap_IsNil verifies that IsNil handles nil maps correctly.
func TestNilMap_IsNil(t *testing.T) {
	var m Record[string, int]
	assert.True(t, IsNil(m), "IsNil should return true for nil map")
	assert.False(t, IsNil(Record[string, int]{}), "IsNil should return false for non-nil empty map")
}

// TestNilMap_IsNonNil verifies that IsNonNil handles nil maps correctly.
func TestNilMap_IsNonNil(t *testing.T) {
	var m Record[string, int]
	assert.False(t, IsNonNil(m), "IsNonNil should return false for nil map")
	assert.True(t, IsNonNil(Record[string, int]{}), "IsNonNil should return true for non-nil empty map")
}
// TestNilMap_MonadChainWithIndex verifies that MonadChainWithIndex handles nil maps correctly.
func TestNilMap_MonadChainWithIndex(t *testing.T) {
	var m Record[string, int]
	mon := MergeMonoid[string, string]()
	got := MonadChainWithIndex(mon, m, func(k string, v int) Record[string, string] {
		return Record[string, string]{k: fmt.Sprintf("%d", v)}
	})
	assert.NotNil(t, got, "MonadChainWithIndex should return non-nil map")
	assert.Empty(t, got, "MonadChainWithIndex should return empty map for nil input")
}

// TestNilMap_MonadChain verifies that MonadChain handles nil maps correctly.
func TestNilMap_MonadChain(t *testing.T) {
	var m Record[string, int]
	mon := MergeMonoid[string, string]()
	got := MonadChain(mon, m, func(v int) Record[string, string] {
		return Record[string, string]{"key": fmt.Sprintf("%d", v)}
	})
	assert.NotNil(t, got, "MonadChain should return non-nil map")
	assert.Empty(t, got, "MonadChain should return empty map for nil input")
}

// TestNilMap_ChainWithIndex verifies that ChainWithIndex handles nil maps correctly.
func TestNilMap_ChainWithIndex(t *testing.T) {
	var m Record[string, int]
	mon := MergeMonoid[string, string]()
	expand := ChainWithIndex[int, string](mon)(func(k string, v int) Record[string, string] {
		return Record[string, string]{k: fmt.Sprintf("%d", v)}
	})
	got := expand(m)
	assert.NotNil(t, got, "ChainWithIndex should return non-nil map")
	assert.Empty(t, got, "ChainWithIndex should return empty map for nil input")
}

// TestNilMap_Chain verifies that Chain handles nil maps correctly.
func TestNilMap_Chain(t *testing.T) {
	var m Record[string, int]
	mon := MergeMonoid[string, string]()
	expand := Chain[int, string](mon)(func(v int) Record[string, string] {
		return Record[string, string]{"key": fmt.Sprintf("%d", v)}
	})
	got := expand(m)
	assert.NotNil(t, got, "Chain should return non-nil map")
	assert.Empty(t, got, "Chain should return empty map for nil input")
}

// TestNilMap_Flatten verifies that Flatten handles nil maps correctly.
func TestNilMap_Flatten(t *testing.T) {
	var nested Record[string, Record[string, int]]
	got := Flatten[string, int](MergeMonoid[string, int]())(nested)
	assert.NotNil(t, got, "Flatten should return non-nil map")
	assert.Empty(t, got, "Flatten should return empty map for nil input")
}
// TestNilMap_Copy verifies that Copy handles nil maps correctly.
func TestNilMap_Copy(t *testing.T) {
	var m Record[string, int]
	got := Copy(m)
	assert.NotNil(t, got, "Copy should return non-nil map")
	assert.Empty(t, got, "Copy should return empty map for nil input")
}

// TestNilMap_Clone verifies that Clone handles nil maps correctly.
func TestNilMap_Clone(t *testing.T) {
	var m Record[string, int]
	double := Clone[string, int](func(v int) int { return v * 2 })
	got := double(m)
	assert.NotNil(t, got, "Clone should return non-nil map")
	assert.Empty(t, got, "Clone should return empty map for nil input")
}

// TestNilMap_FromArray verifies that FromArray handles nil/empty arrays correctly.
func TestNilMap_FromArray(t *testing.T) {
	build := FromArray[string, int](SG.Last[int]())
	// Test with nil slice
	var none Entries[string, int]
	got := build(none)
	assert.NotNil(t, got, "FromArray should return non-nil map for nil slice")
	assert.Empty(t, got, "FromArray should return empty map for nil slice")
	// Test with empty slice
	got = build(Entries[string, int]{})
	assert.NotNil(t, got, "FromArray should return non-nil map for empty slice")
	assert.Empty(t, got, "FromArray should return empty map for empty slice")
}
// TestNilMap_MonadAp verifies that MonadAp handles nil maps correctly.
func TestNilMap_MonadAp(t *testing.T) {
	var noFns Record[string, func(int) string]
	var noVals Record[string, int]
	mon := MergeMonoid[string, string]()
	// nil functions, nil values
	got := MonadAp(mon, noFns, noVals)
	assert.NotNil(t, got, "MonadAp should return non-nil map")
	assert.Empty(t, got, "MonadAp should return empty map for nil inputs")
	// nil functions, non-nil values
	vals := Record[string, int]{"a": 1}
	got = MonadAp(mon, noFns, vals)
	assert.NotNil(t, got, "MonadAp should return non-nil map")
	assert.Empty(t, got, "MonadAp should return empty map when functions are nil")
	// non-nil functions, nil values
	fns := Record[string, func(int) string]{"a": func(v int) string { return fmt.Sprintf("%d", v) }}
	got = MonadAp(mon, fns, noVals)
	assert.NotNil(t, got, "MonadAp should return non-nil map")
	assert.Empty(t, got, "MonadAp should return empty map when values are nil")
}

// TestNilMap_Of verifies that Of creates a proper singleton map.
func TestNilMap_Of(t *testing.T) {
	got := Of("key", 42)
	assert.NotNil(t, got, "Of should return non-nil map")
	assert.Len(t, got, 1, "Of should create map with one entry")
	assert.Equal(t, 42, got["key"], "Of should set value correctly")
}

// TestNilMap_FromEntries verifies that FromEntries handles nil/empty slices correctly.
func TestNilMap_FromEntries(t *testing.T) {
	// Test with nil slice
	var none Entries[string, int]
	got := FromEntries(none)
	assert.NotNil(t, got, "FromEntries should return non-nil map for nil slice")
	assert.Empty(t, got, "FromEntries should return empty map for nil slice")
	// Test with empty slice
	got = FromEntries(Entries[string, int]{})
	assert.NotNil(t, got, "FromEntries should return non-nil map for empty slice")
	assert.Empty(t, got, "FromEntries should return empty map for empty slice")
	// Test with actual entries
	got = FromEntries(Entries[string, int]{
		P.MakePair("a", 1),
		P.MakePair("b", 2),
	})
	assert.NotNil(t, got, "FromEntries should return non-nil map")
	assert.Len(t, got, 2, "FromEntries should create map with correct size")
	assert.Equal(t, 1, got["a"], "FromEntries should set values correctly")
	assert.Equal(t, 2, got["b"], "FromEntries should set values correctly")
}

// TestNilMap_Singleton verifies that Singleton creates a proper singleton map.
func TestNilMap_Singleton(t *testing.T) {
	got := Singleton("key", 42)
	assert.NotNil(t, got, "Singleton should return non-nil map")
	assert.Len(t, got, 1, "Singleton should create map with one entry")
	assert.Equal(t, 42, got["key"], "Singleton should set value correctly")
}

// TestNilMap_Empty verifies that Empty creates an empty non-nil map.
func TestNilMap_Empty(t *testing.T) {
	got := Empty[string, int]()
	assert.NotNil(t, got, "Empty should return non-nil map")
	assert.Len(t, got, 0, "Empty should return empty map")
	assert.False(t, IsNil(got), "Empty should not return nil map")
}

// TestNilMap_ConstNil verifies that ConstNil returns a nil map.
func TestNilMap_ConstNil(t *testing.T) {
	got := ConstNil[string, int]()
	assert.Nil(t, got, "ConstNil should return nil map")
	assert.True(t, IsNil(got), "ConstNil should return nil map")
}
// TestNilMap_FoldMap verifies that FoldMap handles nil maps correctly.
func TestNilMap_FoldMap(t *testing.T) {
	var m Record[string, int]
	toString := FoldMap[string, int, string](S.Monoid)(func(v int) string {
		return fmt.Sprintf("%d", v)
	})
	assert.Equal(t, "", toString(m), "FoldMap should return empty value for nil map")
}

// TestNilMap_FoldMapWithIndex verifies that FoldMapWithIndex handles nil maps correctly.
func TestNilMap_FoldMapWithIndex(t *testing.T) {
	var m Record[string, int]
	toString := FoldMapWithIndex[string, int, string](S.Monoid)(func(k string, v int) string {
		return fmt.Sprintf("%s=%d", k, v)
	})
	assert.Equal(t, "", toString(m), "FoldMapWithIndex should return empty value for nil map")
}

// TestNilMap_Fold verifies that Fold handles nil maps correctly.
func TestNilMap_Fold(t *testing.T) {
	var m Record[string, string]
	assert.Equal(t, "", Fold[string](S.Monoid)(m), "Fold should return empty value for nil map")
}

// TestNilMap_MonadFlap verifies that MonadFlap handles nil maps correctly.
func TestNilMap_MonadFlap(t *testing.T) {
	var m Record[string, func(int) string]
	got := MonadFlap(m, 42)
	assert.NotNil(t, got, "MonadFlap should return non-nil map")
	assert.Empty(t, got, "MonadFlap should return empty map for nil input")
}