Error: Failures:
Error: TestHiveConnectorTest>BaseHiveConnectorTest.testScaleWriters:3853->BaseHiveConnectorTest.testWithAllStorageFormats:8360->BaseHiveConnectorTest.testWithStorageFormat:8373 Failure for format PARQUET with properties {hive={experimental_parquet_optimized_writer_enabled=false}}
Error: TestHiveConnectorTest>BaseHiveConnectorTest.testSelectWithNoColumns:7621->BaseHiveConnectorTest.testWithAllStorageFormats:8360->BaseHiveConnectorTest.testWithStorageFormat:8373 Failure for format PARQUET with properties {hive={experimental_parquet_optimized_writer_enabled=false}}
Error: TestHiveConnectorTest>BaseHiveConnectorTest.testSubfieldReordering:5210->AbstractTestQueryFramework.assertUpdate:318->AbstractTestQueryFramework.assertUpdate:323 » QueryFailed
Error: TestHiveConnectorTest>BaseHiveConnectorTest.testTimestampPrecisionCtas:7992->BaseHiveConnectorTest.testWithAllStorageFormats:8360->BaseHiveConnectorTest.testWithStorageFormat:8373 Failure for format PARQUET with properties {hive={experimental_parquet_optimized_writer_enabled=false}}
Error: TestHiveConnectorTest>BaseHiveConnectorTest.testTimestampPrecisionInsert:7965->BaseHiveConnectorTest.testWithAllStorageFormats:8360->BaseHiveConnectorTest.testWithStorageFormat:8373 Failure for format PARQUET with properties {hive={experimental_parquet_optimized_writer_enabled=false}}
Error: TestHiveConnectorTest>BaseHiveConnectorTest.testUseColumnAddDrop:8195->AbstractTestQueryFramework.assertUpdate:323 » QueryFailed
Error: TestHiveConnectorTest>BaseHiveConnectorTest.testUseColumnNames:8140->AbstractTestQueryFramework.assertUpdate:323 » QueryFailed
Error: TestHiveConnectorTest>BaseHiveConnectorTest.testUseColumnNames:8140->AbstractTestQueryFramework.assertUpdate:323 » QueryFailed
Error: TestParquetPageSkipping.testPageSkippingWithNonSequentialOffsets:102->AbstractTestQueryFramework.assertUpdate:318->AbstractTestQueryFramework.assertUpdate:323 » QueryFailed
Error: TestParquetPageSkipping.testPageSkipping:139->buildSortedTables:74->AbstractTestQueryFramework.assertUpdate:323 » QueryFailed
Error: io.trino.plugin.hive.TestHiveConnectorTest.testUseColumnNames[PARQUET, false](4) Time elapsed: 0.098 s <<< FAILURE!
io.trino.testing.QueryFailedException: New Memory allocation 636336 bytes is smaller than the minimum allocation size of 1048576 bytes.
at io.trino.testing.AbstractTestingTrinoClient.execute(AbstractTestingTrinoClient.java:123)
at io.trino.testing.DistributedQueryRunner.execute(DistributedQueryRunner.java:479)
at io.trino.testing.QueryAssertions.assertUpdate(QueryAssertions.java:71)
at io.trino.testing.AbstractTestQueryFramework.assertUpdate(AbstractTestQueryFramework.java:323)
at io.trino.plugin.hive.BaseHiveConnectorTest.testUseColumnNames(BaseHiveConnectorTest.java:8140)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
at org.testng.internal.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:104)
at org.testng.internal.Invoker.invokeMethod(Invoker.java:645)
at org.testng.internal.Invoker.invokeTestMethod(Invoker.java:851)
at org.testng.internal.Invoker.invokeTestMethods(Invoker.java:1177)
at org.testng.internal.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:129)
at org.testng.internal.TestMethodWorker.run(TestMethodWorker.java:112)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
Suppressed: java.lang.Exception: SQL: INSERT INTO test_renames_parquet_false_1036p9fb8h VALUES(111, 'Katy', 57, 'CA')
at io.trino.testing.DistributedQueryRunner.execute(DistributedQueryRunner.java:482)
... 16 more
Caused by: org.apache.parquet.hadoop.ParquetMemoryManagerRuntimeException: New Memory allocation 636336 bytes is smaller than the minimum allocation size of 1048576 bytes.
at org.apache.parquet.hadoop.MemoryManager.updateAllocation(MemoryManager.java:132)
at org.apache.parquet.hadoop.MemoryManager.addWriter(MemoryManager.java:86)
at org.apache.parquet.hadoop.ParquetRecordWriter.<init>(ParquetRecordWriter.java:155)
at org.apache.parquet.hadoop.ParquetOutputFormat.getRecordWriter(ParquetOutputFormat.java:501)
at org.apache.parquet.hadoop.ParquetOutputFormat.getRecordWriter(ParquetOutputFormat.java:430)
at org.apache.parquet.hadoop.ParquetOutputFormat.getRecordWriter(ParquetOutputFormat.java:425)
at org.apache.hadoop.hive.ql.io.parquet.write.ParquetRecordWriterWrapper.<init>(ParquetRecordWriterWrapper.java:70)
at org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat.getParquerRecordWriterWrapper(MapredParquetOutputFormat.java:137)
at org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat.getHiveRecordWriter(MapredParquetOutputFormat.java:126)
at io.trino.plugin.hive.parquet.ParquetRecordWriter.create(ParquetRecordWriter.java:66)
at io.trino.plugin.hive.util.HiveWriteUtils.createRecordWriter(HiveWriteUtils.java:185)
at io.trino.plugin.hive.RecordFileWriter.<init>(RecordFileWriter.java:113)
at io.trino.plugin.hive.HiveWriterFactory.createWriter(HiveWriterFactory.java:538)
at io.trino.plugin.hive.HivePageSink.getWriterIndexes(HivePageSink.java:402)
at io.trino.plugin.hive.HivePageSink.writePage(HivePageSink.java:301)
at io.trino.plugin.hive.HivePageSink.doAppend(HivePageSink.java:296)
at io.trino.plugin.hive.HivePageSink.lambda$appendPage$2(HivePageSink.java:282)
at io.trino.plugin.hive.authentication.HdfsAuthentication.lambda$doAs$0(HdfsAuthentication.java:26)
at io.trino.plugin.hive.authentication.NoHdfsAuthentication.doAs(NoHdfsAuthentication.java:25)
at io.trino.plugin.hive.authentication.HdfsAuthentication.doAs(HdfsAuthentication.java:25)
at io.trino.plugin.hive.HdfsEnvironment.doAs(HdfsEnvironment.java:102)
at io.trino.plugin.hive.HivePageSink.appendPage(HivePageSink.java:282)
at io.trino.plugin.base.classloader.ClassLoaderSafeConnectorPageSink.appendPage(ClassLoaderSafeConnectorPageSink.java:69)
at io.trino.operator.TableWriterOperator.addInput(TableWriterOperator.java:257)
at io.trino.operator.Driver.processInternal(Driver.java:415)
at io.trino.operator.Driver.lambda$process$10(Driver.java:313)
at io.trino.operator.Driver.tryWithLock(Driver.java:698)
at io.trino.operator.Driver.process(Driver.java:305)
at io.trino.operator.Driver.processForDuration(Driver.java:276)
at io.trino.execution.SqlTaskExecution$DriverSplitRunner.processFor(SqlTaskExecution.java:740)
at io.trino.execution.executor.PrioritizedSplitRunner.process(PrioritizedSplitRunner.java:164)
at io.trino.execution.executor.TaskExecutor$TaskRunner.run(TaskExecutor.java:490)
at io.trino.$gen.Trino_testversion____20220812_060842_14042.run(Unknown Source)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
https://github.com/trinodb/trino/runs/7800774821
Thank you @raunaqmorarka for fixing this!
https://github.com/trinodb/trino/pull/15742 disables the Hadoop Parquet MemoryManager, which should fix this problem.