mirror of
https://github.com/BookStackApp/BookStack.git
synced 2025-07-30 04:23:11 +03:00
Search Index: Fixed SQL error when indexing large pages
Due to hitting statement placeholder limits (typically 65k) when inserting index terms for a single page. Added test to cover. Also added skipped tests for cases we don't always want to run. For #5322
This commit is contained in:
@ -30,7 +30,7 @@ class SearchIndex
|
||||
{
|
||||
$this->deleteEntityTerms($entity);
|
||||
$terms = $this->entityToTermDataArray($entity);
|
||||
SearchTerm::query()->insert($terms);
|
||||
$this->insertTerms($terms);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -46,10 +46,7 @@ class SearchIndex
|
||||
array_push($terms, ...$entityTerms);
|
||||
}
|
||||
|
||||
$chunkedTerms = array_chunk($terms, 500);
|
||||
foreach ($chunkedTerms as $termChunk) {
|
||||
SearchTerm::query()->insert($termChunk);
|
||||
}
|
||||
$this->insertTerms($terms);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -99,6 +96,19 @@ class SearchIndex
|
||||
$entity->searchTerms()->delete();
|
||||
}
|
||||
|
||||
/**
|
||||
* Insert the given terms into the database.
|
||||
* Chunks through the given terms to remain within database limits.
|
||||
* @param array[] $terms
|
||||
*/
|
||||
protected function insertTerms(array $terms): void
|
||||
{
|
||||
$chunkedTerms = array_chunk($terms, 500);
|
||||
foreach ($chunkedTerms as $termChunk) {
|
||||
SearchTerm::query()->insert($termChunk);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a scored term array from the given text, where the keys are the terms
|
||||
* and the values are their scores.
|
||||
|
Reference in New Issue
Block a user