
Merge branch 'master' into feat/static-upstream

Christian Groschupp 2019-11-19 12:23:42 +01:00 committed by GitHub
commit 6d74a42e57
10 changed files with 199 additions and 32 deletions

View File

@ -6,17 +6,19 @@
- [#259](https://github.com/pusher/oauth2_proxy/pull/259) Redirect to HTTPS (@jmickey)
- [#273](https://github.com/pusher/oauth2_proxy/pull/273) Support Go 1.13 (@dio)
- [#275](https://github.com/pusher/oauth2_proxy/pull/275) docker: build from debian buster (@syscll)
- [#258](https://github.com/pusher/oauth2_proxy/pull/258) Add IDToken for Azure provider
- [#258](https://github.com/pusher/oauth2_proxy/pull/258) Add IDToken for Azure provider (@leyshon)
- This PR adds the IDToken into the session for the Azure provider, allowing requests to a backend to be identified as a specific user. As a consequence, if you are using a cookie to store the session, the cookie will now exceed the 4KB size limit and be split into multiple cookies. This can cause problems when using nginx as a proxy, resulting in no cookie being passed at all. Either increase the proxy_buffer_size in nginx or use the Redis session storage (see https://pusher.github.io/oauth2_proxy/configuration#redis-storage). A sketch of the cookie-splitting behavior is shown after this list.
- [#286](https://github.com/pusher/oauth2_proxy/pull/286) Requests.go updated with useful error messages (@biotom)
- [#274](https://github.com/pusher/oauth2_proxy/pull/274) Supports many github teams with api pagination support (@toshi-miura, @apratina)
- [#302](https://github.com/pusher/oauth2_proxy/pull/302) Rewrite dist script (@syscll)
- [#304](https://github.com/pusher/oauth2_proxy/pull/304) Add new Logo! :tada: (@JoelSpeed)
- [#300](https://github.com/pusher/oauth2_proxy/pull/300) Added userinfo endpoint (@kbabuadze)
- [#309](https://github.com/pusher/oauth2_proxy/pull/309) Added support for custom CA when connecting to Redis cache (@lleszczu)
- [#248](https://github.com/pusher/oauth2_proxy/pull/248) Fix issue with X-Auth-Request-Redirect header being ignored (@webnard)
- [#265](https://github.com/pusher/oauth2_proxy/pull/265) Add upstream with static response (@cgroschupp)
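
To make the cookie-splitting note above concrete, here is a minimal illustrative sketch of chunking an oversized session value across numbered cookies. It is not the proxy's actual implementation; the cookie name, chunk size, and session payload below are assumptions for demonstration only.

```go
// Illustrative only: browsers cap a single cookie at roughly 4KB, so a session
// that grows past that (for example, one carrying an ID token) has to be split
// across several cookies.
package main

import (
	"fmt"
	"net/http"
	"strings"
)

const maxCookieValueSize = 4000 // stay under the ~4096-byte per-cookie limit

// splitCookie chunks a long session value into numbered cookies.
func splitCookie(name, value string) []*http.Cookie {
	var cookies []*http.Cookie
	for i := 0; len(value) > 0; i++ {
		chunk := value
		if len(chunk) > maxCookieValueSize {
			chunk = chunk[:maxCookieValueSize]
		}
		cookies = append(cookies, &http.Cookie{
			Name:  fmt.Sprintf("%s_%d", name, i),
			Value: chunk,
		})
		value = value[len(chunk):]
	}
	return cookies
}

func main() {
	session := strings.Repeat("x", 9000) // placeholder for a session carrying a large ID token
	for _, c := range splitCookie("_oauth2_proxy", session) {
		fmt.Printf("%s: %d bytes\n", c.Name, len(c.Value))
	}
}
```

Every split cookie becomes another Set-Cookie header on the response, which is why the note above suggests raising nginx's proxy_buffer_size or switching to the Redis session storage instead.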
# v4.0.0
- [#248](https://github.com/pusher/oauth2_proxy/pull/248) Fix issue with X-Auth-Request-Redirect header being ignored
## Release Highlights
- Documentation is now on a [microsite](https://pusher.github.io/oauth2_proxy/)
- Health check logging can now be disabled for quieter logs

View File

@ -0,0 +1,30 @@
#
# Autocompletion for oauth2_proxy
#
# To install this, copy/move this file to /etc/bash.completion.d/
# or add a line to your ~/.bashrc | ~/.bash_profile that says ". /path/to/oauth2_proxy/contrib/oauth2_proxy_autocomplete.sh"
#
_oauth2_proxy() {
	_oauth2_proxy_commands=$(oauth2_proxy -h 2>&1 | sed -n '/^\s*-/s/ \+/ /gp' | awk '{print $1}' | tr '\n' ' ')
	local cur prev
	COMPREPLY=()
	cur="${COMP_WORDS[COMP_CWORD]}"
	prev="${COMP_WORDS[COMP_CWORD-1]}"
	case "$prev" in
		-@(config|tls-cert-file|tls-key-file|authenticated-emails-file|htpasswd-file|custom-templates-dir|logging-filename|jwt-key-file))
			_filedir
			return 0
			;;
		-provider)
			COMPREPLY=( $(compgen -W "google azure facebook github keycloak gitlab linkedin login.gov" -- ${cur}) )
			return 0
			;;
		-@(http-address|https-address|redirect-url|upstream|basic-auth-password|skip-auth-regex|flush-interval|extra-jwt-issuers|email-domain|whitelist-domain|keycloak-group|azure-tenant|bitbucket-team|bitbucket-repository|github-org|github-team|gitlab-group|google-group|google-admin-email|google-service-account-json|client-id|client-secret|banner|footer|proxy-prefix|ping-path|cookie-name|cookie-secret|cookie-domain|cookie-path|cookie-expire|cookie-refresh|redis-sentinel-master-name|redis-sentinel-connection-urls|logging-max-size|logging-max-age|logging-max-backups|standard-logging-format|request-logging-format|exclude-logging-paths|auth-logging-format|oidc-issuer-url|oidc-jwks-url|login-url|redeem-url|profile-url|resource|validate-url|scope|approval-prompt|signature-key|acr-values|jwt-key|pubjwk-url))
			return 0
			;;
	esac
	COMPREPLY=( $(compgen -W "${_oauth2_proxy_commands}" -- ${cur}) )
	return 0
}
complete -F _oauth2_proxy oauth2_proxy

View File

@ -14,4 +14,5 @@ OAuth2 Proxy responds directly to the following endpoints. All other endpoints w
- /oauth2/sign_in - the login page, which also doubles as a sign out page (it clears cookies)
- /oauth2/start - a URL that will redirect to start the OAuth cycle
- /oauth2/callback - the URL used at the end of the OAuth cycle. The oauth app will be configured with this as the callback url.
- /oauth2/userinfo - returns the authenticated user's email from the session in JSON format (see the example after this list).
- /oauth2/auth - only returns a 202 Accepted response or a 401 Unauthorized response; for use with the [Nginx `auth_request` directive](#nginx-auth-request)
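
For example, the new /oauth2/userinfo endpoint can be queried by any client that holds a valid session cookie. The sketch below is a minimal illustration in Go; the proxy address (http://localhost:4180), the cookie name (_oauth2_proxy), and the cookie value are assumptions (typical defaults and a placeholder), not values taken from this page.

```go
// Minimal sketch: calling /oauth2/userinfo and decoding its JSON body.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	req, err := http.NewRequest("GET", "http://localhost:4180/oauth2/userinfo", nil)
	if err != nil {
		log.Fatal(err)
	}
	// Forward the session cookie issued by the proxy; the value is a placeholder.
	req.AddCookie(&http.Cookie{Name: "_oauth2_proxy", Value: "<session-cookie-value>"})

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		log.Fatalf("expected 200 from /oauth2/userinfo, got %d", resp.StatusCode)
	}

	var userInfo struct {
		Email string `json:"email"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&userInfo); err != nil {
		log.Fatal(err)
	}
	fmt.Println("authenticated as:", userInfo.Email)
}
```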

View File

@ -91,6 +91,8 @@ func main() {
flagSet.String("redis-connection-url", "", "URL of redis server for redis session storage (eg: redis://HOST[:PORT])")
flagSet.Bool("redis-use-sentinel", false, "Connect to redis via sentinels. Must set --redis-sentinel-master-name and --redis-sentinel-connection-urls to use this feature")
flagSet.String("redis-sentinel-master-name", "", "Redis sentinel master name. Used in conjunction with --redis-use-sentinel")
flagSet.String("redis-ca-path", "", "Redis custom CA path")
flagSet.Bool("redis-insecure-skip-tls-verify", false, "Use insecure TLS connection to redis")
flagSet.Var(&redisSentinelConnectionURLs, "redis-sentinel-connection-urls", "List of Redis sentinel connection URLs (eg redis://HOST[:PORT]). Used in conjunction with --redis-use-sentinel")
flagSet.String("logging-filename", "", "File to log requests to, empty for stdout")

View File

@ -4,6 +4,7 @@ import (
"context"
"crypto/tls"
b64 "encoding/base64"
"encoding/json"
"errors"
"fmt"
"html/template"
@ -76,6 +77,7 @@ type OAuthProxy struct {
OAuthStartPath string
OAuthCallbackPath string
AuthOnlyPath string
UserInfoPath string
redirectURL *url.URL // the url to receive requests at
whitelistDomains []string
@ -278,6 +280,7 @@ func NewOAuthProxy(opts *Options, validator func(string) bool) *OAuthProxy {
OAuthStartPath: fmt.Sprintf("%s/start", opts.ProxyPrefix),
OAuthCallbackPath: fmt.Sprintf("%s/callback", opts.ProxyPrefix),
AuthOnlyPath: fmt.Sprintf("%s/auth", opts.ProxyPrefix),
UserInfoPath: fmt.Sprintf("%s/userinfo", opts.ProxyPrefix),
ProxyPrefix: opts.ProxyPrefix,
provider: opts.provider,
@ -569,6 +572,8 @@ func (p *OAuthProxy) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
p.OAuthCallback(rw, req)
case path == p.AuthOnlyPath:
p.AuthenticateOnly(rw, req)
case path == p.UserInfoPath:
p.UserInfo(rw, req)
default:
p.Proxy(rw, req)
}
@ -597,6 +602,22 @@ func (p *OAuthProxy) SignIn(rw http.ResponseWriter, req *http.Request) {
}
}
// UserInfo endpoint outputs session email in JSON format
func (p *OAuthProxy) UserInfo(rw http.ResponseWriter, req *http.Request) {
	session, err := p.getAuthenticatedSession(rw, req)
	if err != nil {
		http.Error(rw, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
		return
	}
	userInfo := struct {
		Email string `json:"email"`
	}{session.Email}
	rw.Header().Set("Content-Type", "application/json")
	rw.WriteHeader(http.StatusOK)
	json.NewEncoder(rw).Encode(userInfo)
}
// SignOut sends a response to clear the authentication cookie
func (p *OAuthProxy) SignOut(rw http.ResponseWriter, req *http.Request) {
p.ClearSessionCookie(rw, req)

View File

@ -775,6 +775,32 @@ func TestProcessCookieFailIfRefreshSetAndCookieExpired(t *testing.T) {
}
}
func NewUserInfoEndpointTest() *ProcessCookieTest {
	pcTest := NewProcessCookieTestWithDefaults()
	pcTest.req, _ = http.NewRequest("GET",
		pcTest.opts.ProxyPrefix+"/userinfo", nil)
	return pcTest
}

func TestUserInfoEndpointAccepted(t *testing.T) {
	test := NewUserInfoEndpointTest()
	startSession := &sessions.SessionState{
		Email: "john.doe@example.com", AccessToken: "my_access_token"}
	test.SaveSession(startSession)

	test.proxy.ServeHTTP(test.rw, test.req)
	assert.Equal(t, http.StatusOK, test.rw.Code)
	bodyBytes, _ := ioutil.ReadAll(test.rw.Body)
	assert.Equal(t, "{\"email\":\"john.doe@example.com\"}\n", string(bodyBytes))
}

func TestUserInfoEndpointUnauthorizedOnNoCookieSetError(t *testing.T) {
	test := NewUserInfoEndpointTest()

	test.proxy.ServeHTTP(test.rw, test.req)
	assert.Equal(t, http.StatusUnauthorized, test.rw.Code)
}

func NewAuthOnlyEndpointTest(modifiers ...OptionsModifier) *ProcessCookieTest {
	pcTest := NewProcessCookieTestWithOptionsModifiers(modifiers...)
	pcTest.req, _ = http.NewRequest("GET",

View File

@ -27,4 +27,6 @@ type RedisStoreOptions struct {
UseSentinel bool `flag:"redis-use-sentinel" cfg:"redis_use_sentinel" env:"OAUTH2_PROXY_REDIS_USE_SENTINEL"`
SentinelMasterName string `flag:"redis-sentinel-master-name" cfg:"redis_sentinel_master_name" env:"OAUTH2_PROXY_REDIS_SENTINEL_MASTER_NAME"`
SentinelConnectionURLs []string `flag:"redis-sentinel-connection-urls" cfg:"redis_sentinel_connection_urls" env:"OAUTH2_PROXY_REDIS_SENTINEL_CONNECTION_URLS"`
RedisCAPath string `flag:"redis-ca-path" cfg:"redis_ca_path" env:"OAUTH2_PROXY_REDIS_CA_PATH"`
RedisInsecureTLS bool `flag:"redis-insecure-skip-tls-verify" cfg:"redis_insecure_skip_tls_verify" env:"OAUTH2_PROXY_REDIS_INSECURE_SKIP_TLS_VERIFY"`
}
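
The struct tags above are what tie each option to its command-line flag, config-file key, and environment variable. The generic sketch below shows how env tags like these can drive environment-variable lookup; it is an illustration of the mechanism only, not the proxy's actual options loader, and the struct, tag values, and file path in it are assumptions.

```go
// Generic illustration: populate a struct from environment variables named in
// its `env` tags, similar in spirit to the tagged options above.
package main

import (
	"fmt"
	"os"
	"reflect"
)

type redisTLSOptions struct {
	RedisCAPath      string `env:"OAUTH2_PROXY_REDIS_CA_PATH"`
	RedisInsecureTLS bool   `env:"OAUTH2_PROXY_REDIS_INSECURE_SKIP_TLS_VERIFY"`
}

func main() {
	// Example value only.
	os.Setenv("OAUTH2_PROXY_REDIS_CA_PATH", "/etc/ssl/redis-ca.pem")

	opts := redisTLSOptions{}
	v := reflect.ValueOf(&opts).Elem()
	t := v.Type()
	for i := 0; i < t.NumField(); i++ {
		val, ok := os.LookupEnv(t.Field(i).Tag.Get("env"))
		if !ok {
			continue
		}
		switch v.Field(i).Kind() {
		case reflect.String:
			v.Field(i).SetString(val)
		case reflect.Bool:
			v.Field(i).SetBool(val == "true")
		}
	}
	fmt.Printf("%+v\n", opts)
}
```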

View File

@ -4,10 +4,12 @@ import (
"crypto/aes"
"crypto/cipher"
"crypto/rand"
"crypto/x509"
"encoding/base64"
"encoding/hex"
"fmt"
"io"
"io/ioutil"
"net/http"
"strings"
"time"
@ -17,6 +19,7 @@ import (
"github.com/pusher/oauth2_proxy/pkg/apis/sessions"
"github.com/pusher/oauth2_proxy/pkg/cookies"
"github.com/pusher/oauth2_proxy/pkg/encryption"
"github.com/pusher/oauth2_proxy/pkg/logger"
)
// TicketData is a structure representing the ticket used in server session storage
@ -64,6 +67,31 @@ func newRedisClient(opts options.RedisStoreOptions) (*redis.Client, error) {
		return nil, fmt.Errorf("unable to parse redis url: %s", err)
	}

	if opts.RedisInsecureTLS {
		if opt.TLSConfig == nil {
			// ParseURL only pre-populates a TLS config for rediss:// URLs
			opt.TLSConfig = &tls.Config{}
		}
		opt.TLSConfig.InsecureSkipVerify = true
	}

	if opts.RedisCAPath != "" {
		rootCAs, err := x509.SystemCertPool()
		if err != nil {
			logger.Printf("failed to load system cert pool for redis connection, falling back to empty cert pool")
		}
		if rootCAs == nil {
			rootCAs = x509.NewCertPool()
		}
		certs, err := ioutil.ReadFile(opts.RedisCAPath)
		if err != nil {
			return nil, fmt.Errorf("failed to load %q, %v", opts.RedisCAPath, err)
		}
		// Append our cert to the system pool
		if ok := rootCAs.AppendCertsFromPEM(certs); !ok {
			logger.Printf("no certs appended, using system certs only")
		}
		if opt.TLSConfig == nil {
			opt.TLSConfig = &tls.Config{}
		}
		opt.TLSConfig.RootCAs = rootCAs
	}

	client := redis.NewClient(opt)
	return client, nil
}
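
For reference, the certificate handling above boils down to copying the system pool, appending the custom CA, and handing the result to a tls.Config. The standalone sketch below shows the same technique in isolation; the CA file path is an assumption for illustration.

```go
// Standalone sketch: build a TLS config that trusts a custom CA in addition to
// the system roots.
package main

import (
	"crypto/tls"
	"crypto/x509"
	"io/ioutil"
	"log"
)

func tlsConfigWithCustomCA(caPath string) (*tls.Config, error) {
	rootCAs, err := x509.SystemCertPool()
	if err != nil || rootCAs == nil {
		// Fall back to an empty pool if the system pool is unavailable.
		rootCAs = x509.NewCertPool()
	}
	pem, err := ioutil.ReadFile(caPath)
	if err != nil {
		return nil, err
	}
	if ok := rootCAs.AppendCertsFromPEM(pem); !ok {
		log.Printf("no certs appended from %q, using system certs only", caPath)
	}
	return &tls.Config{RootCAs: rootCAs}, nil
}

func main() {
	cfg, err := tlsConfigWithCustomCA("/etc/ssl/redis-ca.pem") // assumed path
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("custom CA pool ready: %v", cfg.RootCAs != nil)
}
```

Appending to the system pool rather than replacing it keeps publicly trusted endpoints working while also trusting the private Redis CA.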

View File

@ -7,6 +7,7 @@ import (
"net/http"
"net/url"
"path"
"regexp"
"strconv"
"strings"
@ -75,8 +76,8 @@ func (p *GitHubProvider) hasOrg(accessToken string) (bool, error) {
pn := 1
for {
params := url.Values{
"limit": {"200"},
"page": {strconv.Itoa(pn)},
"per_page": {"100"},
"page": {strconv.Itoa(pn)},
}
endpoint := &url.URL{
@ -139,36 +140,90 @@ func (p *GitHubProvider) hasOrgAndTeam(accessToken string) (bool, error) {
} `json:"organization"`
}
params := url.Values{
"limit": {"200"},
type teamsPage []struct {
Name string `json:"name"`
Slug string `json:"slug"`
Org struct {
Login string `json:"login"`
} `json:"organization"`
}
endpoint := &url.URL{
Scheme: p.ValidateURL.Scheme,
Host: p.ValidateURL.Host,
Path: path.Join(p.ValidateURL.Path, "/user/teams"),
RawQuery: params.Encode(),
}
req, _ := http.NewRequest("GET", endpoint.String(), nil)
req.Header.Set("Accept", "application/vnd.github.v3+json")
req.Header.Set("Authorization", fmt.Sprintf("token %s", accessToken))
resp, err := http.DefaultClient.Do(req)
if err != nil {
return false, err
}
pn := 1
last := 0
for {
params := url.Values{
"per_page": {"100"},
"page": {strconv.Itoa(pn)},
}
body, err := ioutil.ReadAll(resp.Body)
resp.Body.Close()
if err != nil {
return false, err
}
if resp.StatusCode != 200 {
return false, fmt.Errorf(
"got %d from %q %s", resp.StatusCode, endpoint.String(), body)
}
endpoint := &url.URL{
Scheme: p.ValidateURL.Scheme,
Host: p.ValidateURL.Host,
Path: path.Join(p.ValidateURL.Path, "/user/teams"),
RawQuery: params.Encode(),
}
if err := json.Unmarshal(body, &teams); err != nil {
return false, fmt.Errorf("%s unmarshaling %s", err, body)
req, _ := http.NewRequest("GET", endpoint.String(), nil)
req.Header.Set("Accept", "application/vnd.github.v3+json")
req.Header.Set("Authorization", fmt.Sprintf("token %s", accessToken))
resp, err := http.DefaultClient.Do(req)
if err != nil {
return false, err
}
if last == 0 {
// The Link header may be absent when paging is not required and all data fits in a single response.
// GitHub returns a Link header when:
// 1. paging is required (e.g. 100 records with a page size of 99 or less), or
// 2. the request runs past the available pages (e.g. only 10 records exist but page 2 is requested with a page size of 100).
// Link header on a non-final page:
// <https://api.github.com/user/teams?page=1&per_page=100>; rel="prev", <https://api.github.com/user/teams?page=1&per_page=100>; rel="last", <https://api.github.com/user/teams?page=1&per_page=100>; rel="first"
// Link header on the last page (no rel="last" entry):
// <https://api.github.com/user/teams?page=3&per_page=10>; rel="prev", <https://api.github.com/user/teams?page=1&per_page=10>; rel="first"
link := resp.Header.Get("Link")
link := resp.Header.Get("Link")
rep1 := regexp.MustCompile(`(?s).*\<https://api.github.com/user/teams\?page=([0-9]+)&per_page=[0-9]+\>; rel="last".*`)
i, converr := strconv.Atoi(rep1.ReplaceAllString(link, "$1"))
// If the last page cannot be parsed from the Link header, last stays zero
if converr == nil {
last = i
}
}
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
resp.Body.Close()
return false, err
}
resp.Body.Close()
if resp.StatusCode != 200 {
return false, fmt.Errorf(
"got %d from %q %s", resp.StatusCode, endpoint.String(), body)
}
var tp teamsPage
if err := json.Unmarshal(body, &tp); err != nil {
return false, fmt.Errorf("%s unmarshaling %s", err, body)
}
if len(tp) == 0 {
break
}
teams = append(teams, tp...)
if pn == last {
break
}
if last == 0 {
break
}
pn++
}
var hasOrg bool
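
The loop above stops paging once it reaches the page advertised by the rel="last" entry of GitHub's Link response header. The standalone sketch below shows one way to extract that page number; the sample header value is illustrative, and the string-based parsing is a simplification for clarity rather than a copy of the provider's regexp approach.

```go
// Standalone sketch: pull the rel="last" page number out of a GitHub Link header.
package main

import (
	"fmt"
	"net/url"
	"strconv"
	"strings"
)

// lastPage returns the page number advertised by the rel="last" entry of a
// Link header, or 0 if no such entry is present.
func lastPage(link string) int {
	for _, part := range strings.Split(link, ",") {
		if !strings.Contains(part, `rel="last"`) {
			continue
		}
		start := strings.Index(part, "<")
		end := strings.Index(part, ">")
		if start < 0 || end <= start {
			return 0
		}
		u, err := url.Parse(part[start+1 : end])
		if err != nil {
			return 0
		}
		if n, err := strconv.Atoi(u.Query().Get("page")); err == nil {
			return n
		}
	}
	return 0
}

func main() {
	link := `<https://api.github.com/user/teams?page=2&per_page=100>; rel="next", ` +
		`<https://api.github.com/user/teams?page=3&per_page=100>; rel="last"`
	fmt.Println(lastPage(link)) // prints 3
}
```

Returning 0 when no rel="last" entry exists mirrors the provider code above, which leaves last at zero and stops after the current page in that case.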

View File

@ -32,7 +32,7 @@ func testGitHubBackend(payload []string) *httptest.Server {
pathToQueryMap := map[string][]string{
"/user": {""},
"/user/emails": {""},
"/user/orgs": {"limit=200&page=1", "limit=200&page=2", "limit=200&page=3"},
"/user/orgs": {"page=1&per_page=100", "page=2&per_page=100", "page=3&per_page=100"},
}
return httptest.NewServer(http.HandlerFunc(