* The Crawler Sitemap Class.
// Abort if loaded directly outside of WordPress (WPINC is defined by WP core).
defined( 'WPINC' ) || exit();
* Maintains and persists crawler sitemap/blacklist state, parses custom sitemaps,
* and exposes helpers to query & mutate crawler results.
// NOTE(review): this excerpt appears sampled/truncated — property declarations
// referenced by the comments below (e.g. $_site_url, $_tb, $_tb_blacklist,
// $_urls) are not visible here but are assigned in __construct; confirm they
// are declared in the full file.
class Crawler_Map extends Root {
// Tag prefix used by self::debug() log lines.
// NOTE(review): the literal below looks mojibake-garbled (likely a
// double-encoded emoji) — verify the intended bytes against the upstream file.
const LOG_TAG = 'ππΊοΈ';
* Site URL used to simplify URLs.
* Main crawler table name.
* Crawler blacklist table name.
* Timeout (seconds) when fetching sitemaps.
// Sitemap-fetch timeout in seconds; overridable via the
// LITESPEED_CRAWLER_MAP_TIMEOUT constant (see __construct).
private $_conf_map_timeout;
* Collected URLs from parsed sitemaps.
/**
 * Constructor: caches the site URL and the shared Data singleton, resolves
 * the crawler and crawler-blacklist table names, and sets the sitemap-parse
 * timeout (LITESPEED_CRAWLER_MAP_TIMEOUT constant overrides the 180s default).
 */
public function __construct() {
$this->_site_url = get_site_url();
$this->__data = Data::cls();
// Resolve both table names once via the shared Data helper.
$this->_tb = $this->__data->tb( 'crawler' );
$this->_tb_blacklist = $this->__data->tb( 'crawler_blacklist' );
// Specify the timeout while parsing the sitemap.
$this->_conf_map_timeout = defined( 'LITESPEED_CRAWLER_MAP_TIMEOUT' ) ? constant( 'LITESPEED_CRAWLER_MAP_TIMEOUT' ) : 180;
* Save URLs crawl status into DB.
* @param array<int,array<int,array{url:string,code:int}>> $items Map of bit => [ id => [url, code] ].
* @param int $curr_crawler Current crawler index (0-based).
* @return array<int,array>
/**
 * Persist crawl results for one crawler run.
 *
 * The `res` column is a fixed-width string with one status character per
 * crawler; this method splices the current crawler's status char ($bit) into
 * position $curr_crawler via CONCAT(LEFT(...), char, RIGHT(...)). The
 * `reason` column is a comma-separated list (one HTTP code per crawler),
 * spliced analogously with SUBSTRING_INDEX. Blacklist/nocache results are
 * mirrored into the blacklist table (updating existing rows, inserting new
 * URLs).
 *
 * NOTE(review): $wpdb is used but no `global $wpdb;` is visible in this
 * excerpt; likewise $data and $reason_array are appended to without visible
 * initialization — presumably declared in lines elided from this view.
 * Confirm against the full file.
 *
 * @param array $items        Map of status bit => [ id => [ 'url' => ..., 'code' => ... ] ].
 * @param int   $curr_crawler Current crawler index (0-based).
 */
public function save_map_status( $items, $curr_crawler ) {
Utility::compatibility();
$total_crawler = count( Crawler::cls()->list_crawlers() );
// Index of the last crawler slot (res string is $total_crawler chars wide).
$total_crawler_pos = $total_crawler - 1;
// Replace current crawler's position.
$curr_crawler = (int) $curr_crawler;
foreach ( $items as $bit => $ids ) {
// $ids = [ id => [ url, code ], ... ].
self::debug( 'Update map [crawler] ' . $curr_crawler . ' [bit] ' . $bit . ' [count] ' . count( $ids ) );
// Update res first, then reason
// Chars to keep to the right of the spliced status character.
$right_pos = $total_crawler_pos - $curr_crawler;
// IDs are int-cast before interpolation, which guards this IN() clause.
$id_all = implode(',', array_map('intval', array_keys($ids)));
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.InterpolatedNotPrepared
$wpdb->query("UPDATE `$this->_tb` SET res = CONCAT( LEFT( res, $curr_crawler ), '$bit', RIGHT( res, $right_pos ) ) WHERE id IN ( $id_all )");
// Blacklist and nocache results are mirrored into the blacklist table.
if (Crawler::STATUS_BLACKLIST === $bit || Crawler::STATUS_NOCACHE === $bit) {
// Find which of these URLs already exist in the blacklist (joined by URL).
$q = "SELECT a.id, a.url FROM `$this->_tb_blacklist` a LEFT JOIN `$this->_tb` b ON b.url=a.url WHERE b.id IN ( $id_all )";
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.NotPrepared
$existing = $wpdb->get_results($q, ARRAY_A);
// Update current crawler status tag in existing blacklist
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.InterpolatedNotPrepared, WordPress.DB.PreparedSQL.NotPrepared
$count = $wpdb->query("UPDATE `$this->_tb_blacklist` SET res = CONCAT( LEFT( res, $curr_crawler ), '$bit', RIGHT( res, $right_pos ) ) WHERE id IN ( " . implode(',', array_column($existing, 'id')) . ' )');
self::debug('Update blacklist [count] ' . $count);
// More hit IDs than existing blacklist rows => some URLs are new to the blacklist.
if (count($ids) > count($existing)) {
$new_urls = array_diff(array_column($ids, 'url'), array_column($existing, 'url'));
self::debug('Insert into blacklist [count] ' . count($new_urls));
// Bulk INSERT with one (%s, %s, %s) placeholder group per new URL.
$q = "INSERT INTO `$this->_tb_blacklist` ( url, res, reason ) VALUES " . implode(',', array_fill(0, count($new_urls), '( %s, %s, %s )'));
// Fresh res string: all '-' except the current crawler's slot.
$res = array_fill(0, $total_crawler, '-');
$res[$curr_crawler] = $bit;
$res = implode('', $res);
$default_reason = $total_crawler > 1 ? str_repeat(',', $total_crawler - 1) : ''; // Pre-populate default reason value first, update later
foreach ($new_urls as $url) {
// NOTE(review): $data is appended to here but its initialization (and the
// url/res pushes implied by the three placeholders) is not visible in this
// excerpt — confirm in the full file.
$data[] = $default_reason;
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.NotPrepared
$wpdb->query($wpdb->prepare($q, $data));
// Update sitemap reason w/ HTTP code.
// Group row IDs by HTTP response code so each code needs only one UPDATE.
foreach ( $ids as $row_id => $row ) {
$code = (int) $row['code'];
if ( empty( $reason_array[ $code ] ) ) {
$reason_array[ $code ] = [];
$reason_array[ $code ][] = (int) $row_id;
foreach ($reason_array as $code => $v2) {
// NOTE(review): this guard skips the last crawler slot ($curr_crawler ==
// $total_crawler_pos, i.e. $right_pos == 0); the handling for that case is
// presumably in lines elided from this excerpt — confirm.
if ($curr_crawler < $total_crawler_pos) {
// Splice $code into the comma-separated reason list at the crawler's position.
// phpcs:ignore WordPress.DB
$count = $wpdb->query( "UPDATE `$this->_tb` SET reason=CONCAT(SUBSTRING_INDEX(reason, ',', $curr_crawler), '$code', SUBSTRING_INDEX(reason, ',', -$right_pos)) WHERE id IN (" . implode(',', $v2) . ')' );
self::debug("Update map reason [code] $code [pos] left $curr_crawler right -$right_pos [count] $count");
// Update blacklist reason
if (Crawler::STATUS_BLACKLIST === $bit || Crawler::STATUS_NOCACHE === $bit) {
// Same reason splice, applied to blacklist rows matched by URL join.
// phpcs:ignore WordPress.DB
$count = $wpdb->query( "UPDATE `$this->_tb_blacklist` a LEFT JOIN `$this->_tb` b ON b.url = a.url SET a.reason=CONCAT(SUBSTRING_INDEX(a.reason, ',', $curr_crawler), '$code', SUBSTRING_INDEX(a.reason, ',', -$right_pos)) WHERE b.id IN (" . implode(',', $v2) . ')' );
self::debug("Update blacklist [code] $code [pos] left $curr_crawler right -$right_pos [count] $count");
* Add one record to blacklist.
* NOTE: $id is sitemap table ID.
* @param int $id Sitemap row ID.
/**
 * Manually blacklist one sitemap row.
 *
 * Marks the row as blacklisted for every crawler (res = all
 * STATUS_BLACKLIST chars, reason = 'Man' per crawler slot), then either
 * updates the matching blacklist row (found by URL) or inserts a new one.
 *
 * NOTE(review): $wpdb is used with no visible `global $wpdb;`, and the
 * branch between the UPDATE (L88-L90) and INSERT (L91-L93) paths — plus the
 * empty-$row early-return implied by the 'failed to add' debug line — is not
 * visible in this excerpt; confirm against the full file.
 *
 * @param int $id Sitemap (crawler table) row ID.
 */
public function blacklist_add( $id ) {
$total_crawler = count( Crawler::cls()->list_crawlers() );
// Blacklist status for every crawler slot.
$res = str_repeat(Crawler::STATUS_BLACKLIST, $total_crawler);
// 'Man' = manually blacklisted, one entry per crawler in the CSV reason list.
$reason = implode(',', array_fill(0, $total_crawler, 'Man'));
// Look up the URL and any existing blacklist row for it.
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.InterpolatedNotPrepared
$row = $wpdb->get_row("SELECT a.url, b.id FROM `$this->_tb` a LEFT JOIN `$this->_tb_blacklist` b ON b.url = a.url WHERE a.id = '$id'", ARRAY_A);
self::debug('blacklist failed to add [id] ' . $id);
self::debug('Add to blacklist [url] ' . $row['url']);
// Flag the sitemap row itself as blacklisted.
$q = "UPDATE `$this->_tb` SET res = %s, reason = %s WHERE id = %d";
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.NotPrepared
$wpdb->query($wpdb->prepare($q, [ $res, $reason, $id ]));
// Existing blacklist row: refresh its status/reason.
$q = "UPDATE `$this->_tb_blacklist` SET res = %s, reason = %s WHERE id = %d";
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.NotPrepared
$wpdb->query($wpdb->prepare($q, [ $res, $reason, $row['id'] ]));
// No blacklist row yet: insert one.
$q = "INSERT INTO `$this->_tb_blacklist` (url, res, reason) VALUES (%s, %s, %s)";
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.NotPrepared
$wpdb->query($wpdb->prepare($q, [ $row['url'], $res, $reason ]));
* Delete one record from blacklist.
* @param int $id Blacklist row ID.
/**
 * Remove one row from the blacklist table and clear the corresponding
 * blacklist/nocache status chars on the matching sitemap row (by URL).
 *
 * NOTE(review): L99-L100 are a fragment of a $wpdb->prepare() call whose
 * surrounding lines (the prepare/query invocation, the table-name and
 * STATUS_NOCACHE arguments) are not visible in this excerpt; $wpdb also has
 * no visible `global $wpdb;`. Confirm against the full file. The final
 * DELETE interpolates $id directly — verify it is int-cast upstream.
 *
 * @param int $id Blacklist row ID.
 */
public function blacklist_del( $id ) {
// Nothing to do if the blacklist table was never created.
if ( ! $this->__data->tb_exist( 'crawler_blacklist' ) ) {
self::debug('blacklist delete [id] ' . $id);
"UPDATE `%s` SET res=REPLACE(REPLACE(res, '%s', '-'), '%s', '-') WHERE url=(SELECT url FROM `%s` WHERE id=%d)",
Crawler::STATUS_BLACKLIST,
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.NotPrepared
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.InterpolatedNotPrepared
$wpdb->query("DELETE FROM `$this->_tb_blacklist` WHERE id='$id'");
/**
 * Empty the whole blacklist: reset every nocache/blacklist status char in
 * the sitemap table to '-', then truncate the blacklist table.
 *
 * NOTE(review): $wpdb has no visible `global $wpdb;` and the $sql built at
 * L107 has no visible query() call in this excerpt — presumably executed in
 * an elided line; confirm against the full file.
 */
public function blacklist_empty() {
// Nothing to do if the blacklist table was never created.
if ( ! $this->__data->tb_exist( 'crawler_blacklist' ) ) {
self::debug('Truncate blacklist');
// Clear both nocache and blacklist markers from the sitemap res strings.
$sql = sprintf("UPDATE `%s` SET res=REPLACE(REPLACE(res, '%s', '-'), '%s', '-')", $this->_tb, Crawler::STATUS_NOCACHE, Crawler::STATUS_BLACKLIST);
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.NotPrepared
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.InterpolatedNotPrepared
$wpdb->query("TRUNCATE `$this->_tb_blacklist`");
* @param int|false $limit Number of rows to fetch, or false for all.
* @param int|false $offset Offset for pagination, or false to auto-calc.
* @return array<int,array<string,mixed>>
/**
 * Fetch blacklist rows, newest first, optionally paginated.
 *
 * NOTE(review): the visible $q carries no %d placeholders, yet prepare() is
 * passed $offset and $limit — a `LIMIT %d, %d` append is presumably in a
 * line elided from this excerpt; $wpdb also has no visible `global $wpdb;`.
 * Confirm against the full file.
 *
 * @param int|false $limit  Rows per page, or false for all rows.
 * @param int|false $offset Pagination offset, or false to derive it from the
 *                          current page via Utility::pagination().
 * @return array<int,array<string,mixed>>
 */
public function list_blacklist( $limit = false, $offset = false ) {
// No table => nothing to list.
if ( ! $this->__data->tb_exist( 'crawler_blacklist' ) ) {
$q = "SELECT * FROM `$this->_tb_blacklist` ORDER BY id DESC";
if ( false !== $limit ) {
// Auto-derive the offset from the current admin page when not given.
if ( false === $offset ) {
$total = $this->count_blacklist();
$offset = Utility::pagination($total, $limit, true);
// phpcs:ignore WordPress.DB.PreparedSQL.NotPrepared
$q = $wpdb->prepare($q, $offset, $limit);
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.NotPrepared
return $wpdb->get_results($q, ARRAY_A);
/**
 * Count blacklist rows.
 *
 * NOTE(review): $wpdb has no visible `global $wpdb;` in this excerpt —
 * confirm it is declared in the full file. get_var() returns a string|null,
 * so callers should cast if they need an int.
 *
 * @return string|null COUNT(*) result, or null/early-exit if the table is missing.
 */
public function count_blacklist() {
// No table => nothing to count.
if ( ! $this->__data->tb_exist( 'crawler_blacklist' ) ) {
$q = "SELECT COUNT(*) FROM `$this->_tb_blacklist`";
// phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.NotPrepared
return $wpdb->get_var($q);
/**
 * Drop the whole crawler sitemap table and show an admin success notice.
 * (Table is lazily re-created on the next _gen() run.)
 */
public function empty_map() {
Data::cls()->tb_del( 'crawler' );
$msg = __( 'Sitemap cleaned successfully', 'litespeed-cache' );
Admin_Display::success( $msg );
* List generated sitemap.
* @param int $limit Number of rows per page.
* @param int|bool $offset Offset for pagination, or false to auto-calc.
* @return array<int,array<string,mixed>>
/**
 * List generated sitemap rows, optionally filtered by keyword (POST 'kw')
 * and/or result type ('hit'/'miss'/'blacklisted' via Router::verify_type()),
 * paginated by LIMIT %d, %d.
 *
 * NOTE(review): $wpdb has no visible `global $wpdb;`, and the branches
 * wiring the hit/miss/blacklisted filters (the 'hit' === $type checks
 * implied before L149/L158) are partly elided from this excerpt — confirm
 * against the full file.
 *
 * @param int      $limit  Rows per page.
 * @param int|bool $offset Pagination offset, or false to derive it.
 * @return array<int,array<string,mixed>>
 */
public function list_map( $limit, $offset = false ) {
// No table => nothing to list.
if ( ! $this->__data->tb_exist( 'crawler' ) ) {
// Auto-derive the offset from the current admin page when not given.
if ( false === $offset ) {
$total = $this->count_map();
$offset = Utility::pagination($total, $limit, true);
$type = Router::verify_type();
// Keyword search path: filter URLs by LIKE.
// phpcs:ignore WordPress.Security.NonceVerification.Missing
if ( ! empty( $_POST['kw'] ) ) {
// phpcs:ignore WordPress.Security.NonceVerification.Missing
$kw = sanitize_text_field( wp_unslash( $_POST['kw'] ) );
$q = "SELECT * FROM `$this->_tb` WHERE url LIKE %s";
// Narrow by result-status character contained in the res string.
$q .= " AND res LIKE '%" . Crawler::STATUS_HIT . "%'";
if ( 'miss' === $type ) {
$q .= " AND res LIKE '%" . Crawler::STATUS_MISS . "%'";
if ( 'blacklisted' === $type ) {
$q .= " AND res LIKE '%" . Crawler::STATUS_BLACKLIST . "%'";
$q .= ' ORDER BY id LIMIT %d, %d';
// esc_like() before wrapping in % wildcards keeps user input literal.
$req_uri_like = '%' . $wpdb->esc_like( $kw ) . '%';
return $wpdb->get_results( $wpdb->prepare( $q, $req_uri_like, $offset, $limit ), ARRAY_A ); // phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.NotPrepared
// No-keyword path: same status filters, no URL LIKE clause.
$q = "SELECT * FROM `$this->_tb`";
$q .= " WHERE res LIKE '%" . Crawler::STATUS_HIT . "%'";
if ( 'miss' === $type ) {
$q .= " WHERE res LIKE '%" . Crawler::STATUS_MISS . "%'";
if ( 'blacklisted' === $type ) {
$q .= " WHERE res LIKE '%" . Crawler::STATUS_BLACKLIST . "%'";
$q .= ' ORDER BY id LIMIT %d, %d';
return $wpdb->get_results( $wpdb->prepare( $q, $offset, $limit ), ARRAY_A ); // phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.NotPrepared
/**
 * Count sitemap rows, honoring the same type filter as list_map()
 * ('hit'/'miss'/'blacklisted' via Router::verify_type()).
 *
 * NOTE(review): $wpdb has no visible `global $wpdb;`, and the conditional
 * wiring around the WHERE clauses (the 'hit' === $type check implied before
 * L169) is partly elided from this excerpt — confirm against the full file.
 *
 * @return string|null COUNT(*) result, or null/early-exit if the table is missing.
 */
public function count_map() {
// No table => nothing to count.
if ( ! $this->__data->tb_exist( 'crawler' ) ) {
$q = "SELECT COUNT(*) FROM `$this->_tb`";
$type = Router::verify_type();
// Narrow by result-status character contained in the res string.
$q .= " WHERE res LIKE '%" . Crawler::STATUS_HIT . "%'";
if ( 'miss' === $type ) {
$q .= " WHERE res LIKE '%" . Crawler::STATUS_MISS . "%'";
if ( 'blacklisted' === $type ) {
$q .= " WHERE res LIKE '%" . Crawler::STATUS_BLACKLIST . "%'";
return $wpdb->get_var( $q ); // phpcs:ignore WordPress.DB.DirectDatabaseQuery.DirectQuery, WordPress.DB.PreparedSQL.NotPrepared
* @param bool $manual Whether triggered manually from UI.
/**
 * Public entry point for sitemap generation: delegates to _gen() and shows
 * an admin error on failure or, for manual non-cron runs, a success notice
 * with the generated item count.
 *
 * NOTE(review): the _gen() call and the failure check that set $count and
 * gate the error notice are elided from this excerpt — confirm against the
 * full file.
 *
 * @param bool $manual Whether triggered manually from the admin UI.
 */
public function gen( $manual = false ) {
Admin_Display::error( __( 'No valid sitemap parsed for crawler.', 'litespeed-cache' ) );
// Only surface the success notice for interactive (non-cron) manual runs.
if ( ! wp_doing_cron() && $manual ) {
$msg = sprintf( __( 'Sitemap created successfully: %d items', 'litespeed-cache' ), $count );
Admin_Display::success( $msg );
* @return int|false Number of URLs generated or false on failure.
private function _gen() {
if ( ! $this->__data->tb_exist( 'crawler' ) ) {
$this->__data->tb_create( 'crawler' );
if ( ! $this->__data->tb_exist( 'crawler_blacklist' ) ) {
$this->__data->tb_create( 'crawler_blacklist' );
$sitemap = $this->conf( Base::O_CRAWLER_SITEMAP );
$offset = strlen( $this->_site_url );
$sitemap = Utility::sanitize_lines( $sitemap );
foreach ( $sitemap as $this_map ) {
$this->_parse( $this_map );
} catch ( \Exception $e ) {
self::debug( 'β failed to parse custom sitemap: ' . $e->getMessage() );
if ( is_array( $this->_urls ) && ! empty( $this->_urls ) ) {
if ( defined( 'LITESPEED_CRAWLER_DROP_DOMAIN' ) && constant( 'LITESPEED_CRAWLER_DROP_DOMAIN' ) ) {
foreach ( $this->_urls as $k => $v ) {
if ( 0 !== stripos( $v, $this->_site_url ) ) {
unset( $this->_urls[ $k ] );