1
0
mirror of https://github.com/postgres/postgres.git synced 2025-05-18 17:41:14 +03:00

Backpatch fix from HEAD:

Prevent a backend crash when processing CREATE TABLE commands with
more than 65K columns, or when the created table has more than 65K columns
due to adding inherited columns from parent relations. Fix a similar
crash when processing SELECT queries with more than 65K target list
entries. In all three cases we would eventually detect the error and
elog, but the check was being made too late.
This commit is contained in:
Neil Conway 2004-11-17 00:18:26 +00:00
parent a2e1b146fe
commit b990232c5d
2 changed files with 41 additions and 2 deletions

View File

@ -8,7 +8,7 @@
*
*
* IDENTIFICATION
* $Header: /cvsroot/pgsql/src/backend/commands/tablecmds.c,v 1.91.2.1 2004/07/17 17:28:47 tgl Exp $
* $Header: /cvsroot/pgsql/src/backend/commands/tablecmds.c,v 1.91.2.2 2004/11/17 00:18:23 neilc Exp $
*
*-------------------------------------------------------------------------
*/
@ -488,6 +488,23 @@ MergeAttributes(List *schema, List *supers, bool istemp,
* defaults */
int child_attno;
/*
* Check for and reject tables with too many columns. We perform
* this check relatively early for two reasons: (a) we don't run
* the risk of overflowing an AttrNumber in subsequent code (b) an
* O(n^2) algorithm is okay if we're processing <= 1600 columns,
* but could take minutes to execute if the user attempts to
* create a table with hundreds of thousands of columns.
*
 * Note that we also need to check that we do not exceed this
 * figure after including columns from inherited relations.
*/
if (length(schema) > MaxHeapAttributeNumber)
ereport(ERROR,
(errcode(ERRCODE_TOO_MANY_COLUMNS),
errmsg("tables can have at most %d columns",
MaxHeapAttributeNumber)));
/*
* Check for duplicate names in the explicit list of attributes.
*
@ -796,6 +813,16 @@ MergeAttributes(List *schema, List *supers, bool istemp,
}
schema = inhSchema;
/*
* Check that we haven't exceeded the legal # of columns after
* merging in inherited columns.
*/
if (length(schema) > MaxHeapAttributeNumber)
ereport(ERROR,
(errcode(ERRCODE_TOO_MANY_COLUMNS),
errmsg("tables can have at most %d columns",
MaxHeapAttributeNumber)));
}
/*

View File

@ -6,7 +6,7 @@
* Portions Copyright (c) 1996-2003, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* $Header: /cvsroot/pgsql/src/backend/parser/analyze.c,v 1.290.2.1 2003/11/05 22:00:52 tgl Exp $
* $Header: /cvsroot/pgsql/src/backend/parser/analyze.c,v 1.290.2.2 2004/11/17 00:18:26 neilc Exp $
*
*-------------------------------------------------------------------------
*/
@ -440,6 +440,18 @@ transformStmt(ParseState *pstate, Node *parseTree,
result->querySource = QSRC_ORIGINAL;
result->canSetTag = true;
/*
* Check that we did not produce too many resnos; at the very
* least we cannot allow more than 2^16, since that would exceed
 * the range of an AttrNumber. It seems safest to use
* MaxTupleAttributeNumber.
*/
if (pstate->p_next_resno - 1 > MaxTupleAttributeNumber)
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("target lists can have at most %d entries",
MaxTupleAttributeNumber)));
return result;
}