Add robots.txt allowing all agents
[lgpl/argeo-commons.git] org.argeo.cms/src/org/argeo/cms/internal/kernel/NodeHttp.java
index 320f4e102ab8df05c76f57bf703e35d0a24c9371..2e5a7ddfeb58a7d59e9a1f30a2045ec292b2d6c0 100644
@@ -57,6 +57,8 @@ class NodeHttp implements KernelConstants, ArgeoJcrConstants {
                try {
                        httpService.registerServlet("/!", new LinkServlet(repository),
                                        null, null);
+                       httpService.registerServlet("/robots.txt", new RobotServlet(),
+                                       null, null);
                } catch (Exception e) {
                        throw new CmsException("Cannot register filters", e);
                }
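
(Aside: registerServlet is the standard OSGi HttpService call. Its two trailing arguments are the servlet's init-parameter Dictionary and the HttpContext; passing null for both registers the servlet with no init parameters under the default context. The "/robots.txt" alias means the servlet answers exactly that well-known path.)
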
@@ -203,6 +205,24 @@ class NodeHttp implements KernelConstants, ArgeoJcrConstants {
                }
        }
 
+       /** Serves a robots.txt that allows all agents on all paths. */
+       class RobotServlet extends HttpServlet {
+               private static final long serialVersionUID = 7935661175336419089L;
+
+               @Override
+               protected void service(HttpServletRequest request,
+                               HttpServletResponse response) throws ServletException,
+                               IOException {
+                       // set the content type before the response is committed
+                       response.setContentType("text/plain");
+                       PrintWriter writer = response.getWriter();
+                       writer.append("User-agent: *\n");
+                       // an empty Disallow value allows every path
+                       writer.append("Disallow:\n");
+                       writer.flush();
+               }
+       }
+
        /** Intercepts all requests. Authenticates. */
        class RootFilter extends HttpFilter {
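
For reference, the change condenses into the following self-contained sketch. The RobotsTxtSketch wrapper and its register method are illustrative names introduced here, not part of the commit; the servlet and OSGi HTTP APIs are assumed to be on the classpath.

import java.io.IOException;
import java.io.PrintWriter;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.osgi.framework.BundleContext;
import org.osgi.service.http.HttpService;
import org.osgi.service.http.NamespaceException;

public class RobotsTxtSketch {
	/** Same behaviour as the RobotServlet added by this commit. */
	static class RobotServlet extends HttpServlet {
		private static final long serialVersionUID = 1L;

		@Override
		protected void service(HttpServletRequest request,
				HttpServletResponse response) throws ServletException, IOException {
			// content type must be set before the response is committed
			response.setContentType("text/plain");
			PrintWriter writer = response.getWriter();
			writer.append("User-agent: *\n");
			// an empty Disallow value disallows nothing, i.e. allows all paths
			writer.append("Disallow:\n");
			writer.flush();
		}
	}

	/** Registers the servlet under the /robots.txt alias (illustrative). */
	static void register(BundleContext bundleContext) throws ServletException,
			NamespaceException {
		HttpService httpService = bundleContext
				.getService(bundleContext.getServiceReference(HttpService.class));
		// no init parameters, default HttpContext
		httpService.registerServlet("/robots.txt", new RobotServlet(), null, null);
	}
}

A User-agent: * record whose Disallow directive is empty forbids nothing, so every compliant crawler may index the whole site, matching the commit title.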