diff --git a/.htaccess b/.htaccess index 4716fa1..c0c52ad 100644 --- a/.htaccess +++ b/.htaccess @@ -122,6 +122,9 @@ AddEncoding gzip svgz # uncomment the following line: # RewriteBase / + # Pass requests for robots.txt over to index.php, even if robots.txt already exists. + RewriteRule ^robots\.txt$ index.php [last,qsappend] + # Redirect common PHP files to their new locations. RewriteCond %{REQUEST_URI} ^(.*)?/(install.php) [OR] RewriteCond %{REQUEST_URI} ^(.*)?/(rebuild.php) diff --git a/core/modules/system/system.api.php b/core/modules/system/system.api.php index 8534ebf..f25d53e 100644 --- a/core/modules/system/system.api.php +++ b/core/modules/system/system.api.php @@ -34,5 +34,29 @@ function hook_system_themes_page_alter(&$theme_groups) { } /** + * Appends content to the robots.txt. + * + * @return array + * An array of strings to append to the end of the robots.txt file. + */ +function hook_robotstxt() { + return ['Disallow: /tmp/']; +} + +/** + * Alters the content of the robots.txt. + * + * @param array $robotstxt + * An array of strings printed in the robots.txt. + */ +function hook_robotstxt_alter(&$robotstxt) { + foreach ($robotstxt as $index => $line) { + if ($line === 'Disallow: /tmp/') { + $robotstxt[$index] = 'Disallow: /temp/'; + } + } +} + +/** * @} End of "addtogroup hooks". */ diff --git a/core/modules/system/system.module b/core/modules/system/system.module index 2f627be..6b9d8c7 100644 --- a/core/modules/system/system.module +++ b/core/modules/system/system.module @@ -1454,3 +1454,22 @@ function system_query_entity_reference_alter(AlterableInterface $query) { $handler = $query->getMetadata('entity_reference_selection_handler'); $handler->entityQueryAlter($query); } + +/** + * Implements hook_robotstxt(). + */ +function system_robotstxt() { + // Cache the robots.txt content from the file system.
+ $robotstxt = &drupal_static(__FUNCTION__, array()); + if (empty($robotstxt)) { + if ($cache = \Drupal::cache()->get(__FUNCTION__)) { + $robotstxt = $cache->data; + } + else { + $robotstxt = file(DRUPAL_ROOT . '/robots.txt', FILE_IGNORE_NEW_LINES) ?: array(); + \Drupal::cache()->set(__FUNCTION__, $robotstxt); + } + } + + return $robotstxt; +} diff --git a/core/modules/system/system.routing.yml b/core/modules/system/system.routing.yml index ae89fb0..9772e72 100644 --- a/core/modules/system/system.routing.yml +++ b/core/modules/system/system.routing.yml @@ -507,3 +507,10 @@ system.csrftoken: _controller: '\Drupal\system\Controller\CsrfTokenController::csrfToken' requirements: _access: 'TRUE' + +system.robotstxt: + path: '/robots.txt' + defaults: + _controller: '\Drupal\system\Controller\RobotsTxtController::robotstxt' + requirements: + _access: 'TRUE' diff --git a/sites/default/default.services.yml b/sites/default/default.services.yml index e1bbbc7..23f6483 100644 --- a/sites/default/default.services.yml +++ b/sites/default/default.services.yml @@ -153,22 +153,3 @@ parameters: - sftp - webcal - rtsp - - # Configure Cross-Site HTTP requests (CORS). - # Read https://developer.mozilla.org/en-US/docs/Web/HTTP/Access_control_CORS - # for more information about the topic in general. - # Note: By default the configuration is disabled. - cors.config: - enabled: false - # Specify allowed headers, like 'x-allowed-header'. - allowedHeaders: [] - # Specify allowed request methods, specify ['*'] to allow all possible ones. - allowedMethods: [] - # Configure requests allowed from specific origins. - allowedOrigins: ['*'] - # Sets the Access-Control-Expose-Headers header. - exposedHeaders: false - # Sets the Access-Control-Max-Age header. - maxAge: false - # Sets the Access-Control-Allow-Credentials header. 
- supportsCredentials: false diff --git a/sites/default/default.settings.php b/sites/default/default.settings.php index 94a1e04..770a3a7 100644 --- a/sites/default/default.settings.php +++ b/sites/default/default.settings.php @@ -144,11 +144,6 @@ * @code * 'prefix' => 'main_', * @endcode - * - * Per-table prefixes are deprecated as of Drupal 8.2, and will be removed in - * Drupal 9.0. After that, only a single prefix for all tables will be - * supported. - * * To provide prefixes for specific tables, set 'prefix' as an array. * The array's keys are the table names and the values are the prefixes. * The 'default' element is mandatory and holds the prefix for any tables @@ -330,6 +325,9 @@ * * You can also define an array of host names that can be accessed directly, * bypassing the proxy, in $settings['http_client_config']['proxy']['no']. + * + * If these settings are not configured, the system environment variables + * HTTP_PROXY, HTTPS_PROXY, and NO_PROXY on the web server will be used instead. */ # $settings['http_client_config']['proxy']['http'] = 'http://proxy_user:proxy_pass@example.com:8080'; # $settings['http_client_config']['proxy']['https'] = 'http://proxy_user:proxy_pass@example.com:8080'; @@ -684,15 +682,6 @@ # $settings['container_base_class'] = '\Drupal\Core\DependencyInjection\Container'; /** - * Override the default yaml parser class. - * - * Provide a fully qualified class name here if you would like to provide an - * alternate implementation YAML parser. The class must implement the - * \Drupal\Component\Serialization\SerializationInterface interface. - */ -# $settings['yaml_parser_class'] = NULL; - -/** * Trusted host configuration. * * Drupal core can use the Symfony trusted host mechanism to prevent HTTP Host @@ -730,21 +719,6 @@ */ /** - * The default list of directories that will be ignored by Drupal's file API. 
- * - * By default ignore node_modules and bower_components folders to avoid issues - * with common frontend tools and recursive scanning of directories looking for - * extensions. - * - * @see file_scan_directory() - * @see \Drupal\Core\Extension\ExtensionDiscovery::scanDirectory() - */ -$settings['file_scan_ignore_directories'] = [ - 'node_modules', - 'bower_components', -]; - -/** * Load local development override configuration, if available. * * Use settings.local.php to override variables on secondary (staging,