Mirror of https://github.com/oauth2-proxy/oauth2-proxy.git
Use pagewriter to render robots txt page
commit f3bd61b371
parent 9782fc7fa4
@@ -577,13 +577,7 @@ func (p *OAuthProxy) serveHTTP(rw http.ResponseWriter, req *http.Request) {
 
 // RobotsTxt disallows scraping pages from the OAuthProxy
 func (p *OAuthProxy) RobotsTxt(rw http.ResponseWriter, req *http.Request) {
-	_, err := fmt.Fprintf(rw, "User-agent: *\nDisallow: /")
-	if err != nil {
-		logger.Printf("Error writing robots.txt: %v", err)
-		p.ErrorPage(rw, req, http.StatusInternalServerError, err.Error())
-		return
-	}
-	rw.WriteHeader(http.StatusOK)
+	p.pageWriter.WriteRobotsTxt(rw, req)
 }
 
 // ErrorPage writes an error response
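Note: the pagewriter implementation itself is not part of this diff. A minimal sketch of a WriteRobotsTxt method consistent with the call above and with the updated test expectation (trailing newline) might look like the following; the Writer type, the constant, and the log fallback are assumptions for illustration, not the project's actual code.

package pagewriter

import (
	"fmt"
	"log"
	"net/http"
)

// robotsTxt matches the body the updated test expects, including the
// trailing newline.
const robotsTxt = "User-agent: *\nDisallow: /\n"

// Writer is a hypothetical stand-in for the page writer held by OAuthProxy.
type Writer struct{}

// WriteRobotsTxt writes the robots.txt response; the signature mirrors the
// call p.pageWriter.WriteRobotsTxt(rw, req) in the hunk above.
func (w *Writer) WriteRobotsTxt(rw http.ResponseWriter, req *http.Request) {
	rw.WriteHeader(http.StatusOK)
	if _, err := fmt.Fprint(rw, robotsTxt); err != nil {
		// Once the header has been written the status cannot change, so a
		// write failure can only be logged; log.Printf stands in for the
		// project's logger.
		log.Printf("error writing robots.txt: %v", err)
	}
}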
@@ -57,7 +57,7 @@ func TestRobotsTxt(t *testing.T) {
 	req, _ := http.NewRequest("GET", "/robots.txt", nil)
 	proxy.ServeHTTP(rw, req)
 	assert.Equal(t, 200, rw.Code)
-	assert.Equal(t, "User-agent: *\nDisallow: /", rw.Body.String())
+	assert.Equal(t, "User-agent: *\nDisallow: /\n", rw.Body.String())
 }
 
 func TestIsValidRedirect(t *testing.T) {
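The only functional change to the test is the expected body gaining a trailing newline, which suggests the page-writer-rendered response ends with a newline, unlike the old fmt.Fprintf output. A self-contained check of that behaviour against the hypothetical sketch above (using httptest from the standard library) could look like this:

package pagewriter

import (
	"net/http"
	"net/http/httptest"
	"testing"
)

// TestWriteRobotsTxtSketch exercises the hypothetical Writer above; the
// expected body mirrors the updated assertion in TestRobotsTxt.
func TestWriteRobotsTxtSketch(t *testing.T) {
	rw := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/robots.txt", nil)

	(&Writer{}).WriteRobotsTxt(rw, req)

	if rw.Code != http.StatusOK {
		t.Fatalf("unexpected status: %d", rw.Code)
	}
	if got, want := rw.Body.String(), "User-agent: *\nDisallow: /\n"; got != want {
		t.Fatalf("unexpected body: %q, want %q", got, want)
	}
}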