Fix a crash when allocating huge memory segments
When allocating a huge item that requires its own memory segment, we were
actually not committing enough memory from the OS.

Fixes: QTBUG-71501
Change-Id: Ic86a648bba4d7f1eeeded78d8de0f0fc1d3a251d
Reviewed-by: Ulf Hermann <ulf.hermann@qt.io>
parent acd0882f81
commit 19b8799958
@@ -666,11 +666,10 @@ HeapItem *HugeItemAllocator::allocate(size_t size) {
     Chunk *c = nullptr;
     if (size >= MemorySegment::SegmentSize/2) {
         // too large to handle through the ChunkAllocator, let's get our own memory segement
-        size_t segmentSize = size + Chunk::HeaderSize; // space required for the Chunk header
+        size += Chunk::HeaderSize; // space required for the Chunk header
         size_t pageSize = WTF::pageSize();
-        segmentSize = (segmentSize + pageSize - 1) & ~(pageSize - 1); // align to page sizes
-        m = new MemorySegment(segmentSize);
+        size = (size + pageSize - 1) & ~(pageSize - 1); // align to page sizes
+        m = new MemorySegment(size);
         c = m->allocate(size);
     } else {
         c = chunkAllocator->allocate(size);
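The essence of the change above: the number of bytes committed for the dedicated segment must cover the Chunk header as well as the payload, rounded up to whole pages, and the same value must be used both when creating the MemorySegment and when committing memory from it. A minimal standalone sketch of that computation follows; the page size, header size, and payload are assumed placeholder values, not Qt's actual constants (the real code uses WTF::pageSize() and Chunk::HeaderSize).

// Minimal sketch of the corrected size computation, not the Qt sources themselves.
#include <cstddef>
#include <cstdio>

int main()
{
    const std::size_t pageSize = 4096;   // assumed page size
    const std::size_t headerSize = 64;   // hypothetical Chunk header size
    std::size_t size = 1200000;          // hypothetical huge-item payload in bytes

    // Grow the requested size by the header first, then round that same value
    // up to a whole number of pages. Using one variable for both the segment
    // reservation and the commit is what guarantees enough memory is committed.
    size += headerSize;
    size = (size + pageSize - 1) & ~(pageSize - 1);

    std::printf("commit %zu bytes for the segment\n", size);
}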
@@ -359,6 +359,7 @@ private slots:
     void temporaryDeadZone();
     void importLexicalVariables_data();
     void importLexicalVariables();
+    void hugeObject();
 
 private:
 //    static void propertyVarWeakRefCallback(v8::Persistent<v8::Value> object, void* parameter);
@@ -8846,6 +8847,17 @@ void tst_qqmlecmascript::importLexicalVariables()
     QCOMPARE(result, QVariant(expected));
 }
 
+void tst_qqmlecmascript::hugeObject()
+{
+    // mainly check that this doesn't crash
+    QJSEngine engine;
+    QJSValue v = engine.evaluate(QString::fromLatin1(
+        "var known = {}, prefix = 'x'\n"
+        "for (var i = 0; i < 150000; i++) known[prefix + i] = true;"
+    ));
+    QVERIFY(!v.isError());
+}
+
 QTEST_MAIN(tst_qqmlecmascript)
 
 #include "tst_qqmlecmascript.moc"