mirror of https://github.com/oauth2-proxy/oauth2-proxy.git synced 2025-06-15 00:15:00 +02:00

Provide a robots.txt that denies all crawlers

Mike Bland
2015-05-10 15:15:52 -04:00
parent 26170c56af
commit 5b07d9fcef
3 changed files with 28 additions and 0 deletions


@@ -67,6 +67,22 @@ func TestEncodedSlashes(t *testing.T) {
	}
}

func TestRobotsTxt(t *testing.T) {
	opts := NewOptions()
	opts.Upstreams = append(opts.Upstreams, "unused")
	opts.ClientID = "bazquux"
	opts.ClientSecret = "foobar"
	opts.CookieSecret = "xyzzyplugh"
	opts.Validate()
	proxy := NewOauthProxy(opts, func(string) bool { return true })
	rw := httptest.NewRecorder()
	req, _ := http.NewRequest("GET", "/robots.txt", nil)
	proxy.ServeHTTP(rw, req)
	assert.Equal(t, 200, rw.Code)
	assert.Equal(t, "User-agent: *\nDisallow: /", rw.Body.String())
}

type TestProvider struct {
	*providers.ProviderData
	EmailAddress string
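
The handler itself lives in one of the other changed files, which this excerpt does not show. As a rough sketch of the behavior the test pins down, the proxy presumably matches the request path before any auth logic and writes the two-line denial body. The names below (robotsPath, RobotsTxt) and the wiring are assumptions for illustration, not taken from the commit:

package main

import (
	"fmt"
	"net/http"
)

const robotsPath = "/robots.txt" // assumed constant name

// OauthProxy is pared down to what this sketch needs.
type OauthProxy struct {
	serveMux http.Handler // stands in for the upstream reverse proxies
}

// RobotsTxt writes a policy denying all crawlers access to every path,
// matching the body the test above asserts on.
func (p *OauthProxy) RobotsTxt(rw http.ResponseWriter) {
	rw.WriteHeader(http.StatusOK)
	fmt.Fprintf(rw, "User-agent: *\nDisallow: /")
}

// ServeHTTP answers /robots.txt before any authentication handling,
// so crawlers are turned away without entering the OAuth flow.
func (p *OauthProxy) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
	switch {
	case req.URL.Path == robotsPath:
		p.RobotsTxt(rw)
	default:
		p.serveMux.ServeHTTP(rw, req)
	}
}

func main() {
	// Stand-in wiring: 4180 is oauth2-proxy's customary listen port.
	proxy := &OauthProxy{serveMux: http.NotFoundHandler()}
	http.ListenAndServe("127.0.0.1:4180", proxy)
}

Serving the denial ahead of the auth check matters: a 302 to the provider's login page would keep well-behaved crawlers probing, while an unauthenticated 200 on /robots.txt ends the conversation in one request.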