Commit 5cc9569a authored by Christopher Guindon

Website hosted on Netlify should not disallow crawling


Signed-off-by: Christopher Guindon <chris.guindon@eclipse-foundation.org>
parent 4d405e49
@@ -17,8 +17,7 @@
"watch": "NODE_ENV=development webpack --watch --progress --config=node_modules/laravel-mix/setup/webpack.config.js",
"hot": "NODE_ENV=development webpack-dev-server --inline --hot --config=node_modules/laravel-mix/setup/webpack.config.js",
"production": "NODE_ENV=production webpack --progress --config=node_modules/laravel-mix/setup/webpack.config.js",
"disallow_robots_txt": "echo \"User-agent: *\r\nDisallow: /\" > layouts/robots.txt",
"build_netlify": "NODE_ENV=production npm run disallow_robots_txt && npm run production",
"build_netlify": "NODE_ENV=production npm run production",
"postinstall": "NODE_ENV=production npm run production"
},
"dependencies": {
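For context, the removed disallow_robots_txt script generated layouts/robots.txt with a blanket rule telling every crawler to stay out. A minimal sketch of that file's intended contents, assuming echo expands the \r\n escape sequence (behavior varies by shell; bash's builtin echo needs -e):

    User-agent: *
    Disallow: /

After this commit, build_netlify is equivalent to the plain production build (NODE_ENV=production npm run production): no robots.txt is written at build time, so Netlify-hosted deployments remain crawlable.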