Use pagewriter to render robots txt page
parent 9782fc7fa4
commit f3bd61b371
@@ -577,13 +577,7 @@ func (p *OAuthProxy) serveHTTP(rw http.ResponseWriter, req *http.Request) {
 
 // RobotsTxt disallows scraping pages from the OAuthProxy
 func (p *OAuthProxy) RobotsTxt(rw http.ResponseWriter, req *http.Request) {
-	_, err := fmt.Fprintf(rw, "User-agent: *\nDisallow: /")
-	if err != nil {
-		logger.Printf("Error writing robots.txt: %v", err)
-		p.ErrorPage(rw, req, http.StatusInternalServerError, err.Error())
-		return
-	}
-	rw.WriteHeader(http.StatusOK)
+	p.pageWriter.WriteRobotsTxt(rw, req)
 }
 
 // ErrorPage writes an error response
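The handler now delegates to the proxy's page writer, whose WriteRobotsTxt implementation is not part of this diff. As a rough, hypothetical sketch only: assuming the page writer simply emits the standard disallow-all directives with a trailing newline (which is what the updated test below expects) and logs any write error, a standalone equivalent could look like this. The writeRobotsTxt function, the Content-Type header, and the use of the standard log package are assumptions, not code from this commit.

package main

import (
	"fmt"
	"log"
	"net/http"
)

// writeRobotsTxt is a hypothetical stand-in for the page writer's
// WriteRobotsTxt method: it writes the disallow-all directives with a
// trailing newline and logs (rather than returning) any write error.
func writeRobotsTxt(rw http.ResponseWriter, _ *http.Request) {
	rw.Header().Set("Content-Type", "text/plain; charset=utf-8")
	rw.WriteHeader(http.StatusOK)
	if _, err := fmt.Fprint(rw, "User-agent: *\nDisallow: /\n"); err != nil {
		log.Printf("Error writing robots.txt: %v", err)
	}
}

func main() {
	http.HandleFunc("/robots.txt", writeRobotsTxt)
	log.Fatal(http.ListenAndServe(":8080", nil))
}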
@@ -57,7 +57,7 @@ func TestRobotsTxt(t *testing.T) {
 	req, _ := http.NewRequest("GET", "/robots.txt", nil)
 	proxy.ServeHTTP(rw, req)
 	assert.Equal(t, 200, rw.Code)
-	assert.Equal(t, "User-agent: *\nDisallow: /", rw.Body.String())
+	assert.Equal(t, "User-agent: *\nDisallow: /\n", rw.Body.String())
 }
 
 func TestIsValidRedirect(t *testing.T) {
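The only behavioral change visible in the test is the trailing newline in the expected body, consistent with the page writer terminating its output with "\n". A quick httptest check of the hypothetical writeRobotsTxt sketch above (same assumed package, not part of this commit) would assert the same body:

package main

import (
	"net/http"
	"net/http/httptest"
	"testing"
)

// Checks that the hypothetical writeRobotsTxt sketch produces the body
// the updated assertion expects, including the trailing newline.
func TestWriteRobotsTxtSketch(t *testing.T) {
	rw := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/robots.txt", nil)
	writeRobotsTxt(rw, req)
	if rw.Code != http.StatusOK {
		t.Fatalf("unexpected status: %d", rw.Code)
	}
	if got, want := rw.Body.String(), "User-agent: *\nDisallow: /\n"; got != want {
		t.Fatalf("unexpected body: %q, want %q", got, want)
	}
}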