mirror of
https://github.com/postgres/postgres.git
synced 2025-05-26 18:17:33 +03:00
Avoid running out of memory during hash_create by not passing a
number-of-buckets that exceeds the size we actually plan to allow the hash table to grow to. Per trouble report from Sean Shanny.
This commit is contained in:
parent
e8aa10ee47
commit
7af16b2a25
@ -8,7 +8,7 @@
|
|||||||
*
|
*
|
||||||
*
|
*
|
||||||
* IDENTIFICATION
|
* IDENTIFICATION
|
||||||
* $PostgreSQL: pgsql/src/backend/executor/nodeIndexscan.c,v 1.87 2003/11/29 19:51:48 pgsql Exp $
|
* $PostgreSQL: pgsql/src/backend/executor/nodeIndexscan.c,v 1.88 2003/12/30 20:05:05 tgl Exp $
|
||||||
*
|
*
|
||||||
*-------------------------------------------------------------------------
|
*-------------------------------------------------------------------------
|
||||||
*/
|
*/
|
||||||
@ -953,22 +953,28 @@ static void
|
|||||||
create_duphash(IndexScanState *node)
|
create_duphash(IndexScanState *node)
|
||||||
{
|
{
|
||||||
HASHCTL hash_ctl;
|
HASHCTL hash_ctl;
|
||||||
|
long nbuckets;
|
||||||
|
|
||||||
|
node->iss_MaxHash = (SortMem * 1024L) /
|
||||||
|
(MAXALIGN(sizeof(HASHELEMENT)) + MAXALIGN(sizeof(DupHashTabEntry)));
|
||||||
MemSet(&hash_ctl, 0, sizeof(hash_ctl));
|
MemSet(&hash_ctl, 0, sizeof(hash_ctl));
|
||||||
hash_ctl.keysize = SizeOfIptrData;
|
hash_ctl.keysize = SizeOfIptrData;
|
||||||
hash_ctl.entrysize = sizeof(DupHashTabEntry);
|
hash_ctl.entrysize = sizeof(DupHashTabEntry);
|
||||||
hash_ctl.hash = tag_hash;
|
hash_ctl.hash = tag_hash;
|
||||||
hash_ctl.hcxt = CurrentMemoryContext;
|
hash_ctl.hcxt = CurrentMemoryContext;
|
||||||
|
nbuckets = (long) ceil(node->ss.ps.plan->plan_rows);
|
||||||
|
if (nbuckets < 1)
|
||||||
|
nbuckets = 1;
|
||||||
|
if (nbuckets > node->iss_MaxHash)
|
||||||
|
nbuckets = node->iss_MaxHash;
|
||||||
node->iss_DupHash = hash_create("DupHashTable",
|
node->iss_DupHash = hash_create("DupHashTable",
|
||||||
(long) ceil(node->ss.ps.plan->plan_rows),
|
nbuckets,
|
||||||
&hash_ctl,
|
&hash_ctl,
|
||||||
HASH_ELEM | HASH_FUNCTION | HASH_CONTEXT);
|
HASH_ELEM | HASH_FUNCTION | HASH_CONTEXT);
|
||||||
if (node->iss_DupHash == NULL)
|
if (node->iss_DupHash == NULL)
|
||||||
ereport(ERROR,
|
ereport(ERROR,
|
||||||
(errcode(ERRCODE_OUT_OF_MEMORY),
|
(errcode(ERRCODE_OUT_OF_MEMORY),
|
||||||
errmsg("out of memory")));
|
errmsg("out of memory")));
|
||||||
node->iss_MaxHash = (SortMem * 1024L) /
|
|
||||||
(MAXALIGN(sizeof(HASHELEMENT)) + MAXALIGN(sizeof(DupHashTabEntry)));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
int
|
int
|
||||||
|
Loading…
x
Reference in New Issue
Block a user