Compare commits
10 Commits
988e3bac90
...
3c896ec409
| Author | SHA1 | Date |
|---|---|---|
|
|
3c896ec409 | ||
|
|
7e8302c830 | ||
|
|
898ac33376 | ||
|
|
4671e4845a | ||
|
|
17fe057765 | ||
|
|
bf49549002 | ||
|
|
2999de9cbd | ||
|
|
62ac0b2512 | ||
|
|
88d8ceed0b | ||
|
|
36b243e0d4 |
@@ -83,7 +83,7 @@ class BookSearch
|
||||
|
||||
if($queryField == 'mediaType' and $queryText=='noCDS'){
|
||||
$queryText='-mediaType:CDS';
|
||||
} else if (strlen($queryField) > 0) {
|
||||
} else if (isset($queryField) && strlen($queryField) > 0) {
|
||||
$queryText = sprintf('%s:"%s"', $queryField, $queryText);
|
||||
}
|
||||
|
||||
@@ -104,6 +104,10 @@ class BookSearch
|
||||
$this->filterQueryParts[] = sprintf('%s:[%s TO %s]', $field, $min, $max);
|
||||
}
|
||||
|
||||
/**
 * Accessor for the underlying Solr query object.
 *
 * @return \SolrQuery the query instance held by this search
 */
public function getQuery()
{
    return $this->query;
}
|
||||
|
||||
public function addSortField($field, $order = \SolrQuery::ORDER_DESC)
|
||||
{
|
||||
$this->query->addSortField($field, $order);
|
||||
@@ -175,6 +179,7 @@ class BookSearch
|
||||
$books = array();
|
||||
if(isset($results['response']['docs']) && is_array($results['response']['docs'])) {
|
||||
foreach($results['response']['docs'] as $r) {
|
||||
if (!isset($r['id'])) continue;
|
||||
$books[$r['id']] = $r;
|
||||
}
|
||||
}
|
||||
@@ -280,24 +285,35 @@ class BookSearch
|
||||
* @throws WebException
|
||||
*/
|
||||
/**
 * Fetch books from Solr by a list of codes (or another field).
 *
 * It is faster to issue multiple small requests to Solr than one big one,
 * so the codes are split into chunks. The chunk size of 15 was found by
 * testing and seems to be a sweet spot.
 *
 * NOTE(review): relies on self::getLastInstance() returning a usable
 * BookSearch — confirm an instance exists before this is called.
 *
 * @param array  $codes values to match against $field
 * @param string $field Solr field to match on (default 'code')
 * @return array books keyed by numeric book code
 * @throws WebException
 */
public static function getBooks(array $codes, $field = 'code') {
    // 15 was found by testing and seems to be a sweet spot
    $limit = 15;

    $bs = self::getLastInstance();
    $books = [];
    foreach (array_chunk($codes, $limit) as $chunk) {
        // Reset query state for this chunk so previous chunks don't leak in
        $bs->clearQueryParts();
        $bs->addOrQuery($chunk, $field);
        // Request only as many rows as there are codes in this chunk
        $results = $bs->getResults(0, count($chunk));
        // Use '+' rather than array_merge so numeric keys (book codes) are preserved
        $books += $results['books'];
    }
    return $books;
}
|
||||
|
||||
/**
 * Reset the accumulated query state.
 *
 * Empties both the main query parts and the filter query parts so a new
 * query can be built from scratch on the same instance.
 */
public function clearQueryParts(): void
{
    $this->filterQueryParts = [];
    $this->queryParts = [];
}
|
||||
|
||||
/**
 * Fetch books from Solr by a list of codes (or another field) in a single request.
 *
 * It is faster to do multiple small requests to Solr rather than one big one,
 * so see self::getBooks(array $codes, $field = 'code') for a chunked variant
 * (chunk size 15 was found by testing and seems to be a sweet spot).
 *
 * @param array  $codes values to match against $field
 * @param string $field Solr field to match on (default 'code')
 * @return array books keyed by numeric book code
 */
public static function getBooksFull(array $codes, $field = 'code') {
    $bs = self::getLastInstance();
    $bs->addOrQuery($codes, $field);

    // Request exactly as many rows as there are codes
    $results = $bs->getResults(0, count($codes));
    return $results['books'];
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user