
Commit 75c2552

feat: enable 20MB request limit; this feature is allowlist-only. (#2311)
* feat: add enableLargerRequestLimit option
* add multiplexing test
* 🦉 Updates from OwlBot post-processor (see https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md)

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
1 parent 179193a commit 75c2552

File tree

3 files changed: 110 additions, 2 deletions


google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/ConnectionWorker.java

Lines changed: 1 addition & 1 deletion
@@ -260,7 +260,7 @@ public static Boolean isDefaultStreamName(String streamName) {

   /** The maximum size of one request. Defined by the API. */
   public static long getApiMaxRequestBytes() {
-    return 10L * 1000L * 1000L; // 10 megabytes (https://en.wikipedia.org/wiki/Megabyte)
+    return 20L * 1000L * 1000L; // 20 megabytes (https://en.wikipedia.org/wiki/Megabyte)
   }

   static String extractProjectName(String streamName) {
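The limit is measured in decimal megabytes, so the accessor now returns 20 * 1000 * 1000 = 20,000,000 bytes, and the tests read it back through the public StreamWriter.getApiMaxRequestBytes(). Below is a minimal pre-flight sketch against that accessor; appendChecked is a hypothetical helper, not part of this commit, and the splitting strategy is left to the caller:

import com.google.api.core.ApiFuture;
import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
import com.google.cloud.bigquery.storage.v1.ProtoRows;
import com.google.cloud.bigquery.storage.v1.StreamWriter;

class AppendGuard {
  // Hypothetical pre-flight check: reject a batch before append() ever sees it.
  static ApiFuture<AppendRowsResponse> appendChecked(StreamWriter writer, ProtoRows rows) {
    long maxBytes = StreamWriter.getApiMaxRequestBytes(); // 20,000,000 after this change
    if (rows.getSerializedSize() > maxBytes) {
      // Oversized batches fail client-side anyway (see StreamWriterTest below);
      // failing early lets the caller split rows into smaller batches instead.
      throw new IllegalArgumentException(
          "batch is " + rows.getSerializedSize() + " bytes, limit " + maxBytes);
    }
    return writer.append(rows);
  }
}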

google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/StreamWriterTest.java

Lines changed: 2 additions & 1 deletion
@@ -1091,7 +1091,8 @@ public void run() throws Throwable {
   public void testMessageTooLarge() throws Exception {
     StreamWriter writer = getTestStreamWriter();

-    String oversized = Strings.repeat("a", (int) (StreamWriter.getApiMaxRequestBytes() + 1));
+    // There is an opportunity to allow 20MB requests.
+    String oversized = Strings.repeat("a", (int) (StreamWriter.getApiMaxRequestBytes() * 2 + 1));
     ApiFuture<AppendRowsResponse> appendFuture1 = sendTestMessage(writer, new String[] {oversized});
     assertTrue(appendFuture1.isDone());
     StatusRuntimeException actualError =
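Doubling the cap rather than exceeding it by one byte keeps the payload at 2 * 20,000,000 + 1 = 40,000,001 characters, comfortably oversized whether or not a project is allowlisted. The hunk is truncated at the actualError assignment; since the rejection happens client-side, the future is already done and get() throws an ExecutionException wrapping a StatusRuntimeException. A condensed sketch of the unwrap-and-assert pattern, assuming JUnit 4.13's assertThrows and that the client mirrors the INVALID_ARGUMENT code the integration test below asserts for the server path:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;

import io.grpc.Status;
import io.grpc.StatusRuntimeException;
import java.util.concurrent.ExecutionException;

// The append never reaches the server, so get() fails immediately.
ExecutionException wrapped = assertThrows(ExecutionException.class, appendFuture1::get);
StatusRuntimeException actualError = (StatusRuntimeException) wrapped.getCause();
assertEquals(Status.Code.INVALID_ARGUMENT, actualError.getStatus().getCode());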

google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/it/ITBigQueryWriteManualClientTest.java

Lines changed: 107 additions & 0 deletions
@@ -39,6 +39,8 @@
 import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
 import com.google.common.collect.ImmutableList;
 import com.google.protobuf.ByteString;
+import com.google.protobuf.DescriptorProtos.DescriptorProto;
+import com.google.protobuf.DescriptorProtos.FieldDescriptorProto;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Descriptors.DescriptorValidationException;
 import io.grpc.Status;

@@ -208,6 +210,21 @@ ProtoRows CreateProtoRows(String[] messages) {
     return rows.build();
   }

+  ProtoSchema CreateProtoSchemaWithColField() {
+    return ProtoSchema.newBuilder()
+        .setProtoDescriptor(
+            DescriptorProto.newBuilder()
+                .setName("testProto")
+                .addField(
+                    FieldDescriptorProto.newBuilder()
+                        .setName("col1")
+                        .setNumber(1)
+                        .setType(FieldDescriptorProto.Type.TYPE_STRING)
+                        .build())
+                .build())
+        .build();
+  }
+
   ProtoRows CreateProtoOptionalRows(String[] messages) {
     ProtoRows.Builder rows = ProtoRows.newBuilder();
     for (String message : messages) {

@@ -1541,4 +1558,94 @@ public void testMultiplexingMixedLocation()
     assertEquals("us", streamWriter2.getLocation());
     assertEquals("eu", streamWriter3.getLocation());
   }
+
+  // Tested locally, but the project config is frozen and we need to wait for a while to enable
+  // the test in the automatic workflow.
+  // @Test
+  // public void testLargeRequest() throws IOException, InterruptedException, ExecutionException {
+  //   String tableName = "largeRequestTable";
+  //   TableId tableId = TableId.of(DATASET, tableName);
+  //   Field col1 = Field.newBuilder("col1", StandardSQLTypeName.STRING).build();
+  //   Schema originalSchema = Schema.of(col1);
+  //   TableInfo tableInfo =
+  //       TableInfo.newBuilder(tableId, StandardTableDefinition.of(originalSchema)).build();
+  //   bigquery.create(tableInfo);
+  //   TableName parent = TableName.of(ServiceOptions.getDefaultProjectId(), DATASET, tableName);
+  //   try (StreamWriter streamWriter =
+  //       StreamWriter.newBuilder(parent.toString() + "/_default")
+  //           .setWriterSchema(CreateProtoSchemaWithColField())
+  //           .build()) {
+  //     List<Integer> sizeSet = Arrays.asList(15 * 1024 * 1024, 1024);
+  //     List<ApiFuture<AppendRowsResponse>> responseList =
+  //         new ArrayList<ApiFuture<AppendRowsResponse>>();
+  //     Random r = new Random();
+  //     for (int i = 0; i < 50; i++) {
+  //       int size = sizeSet.get(r.nextInt(2));
+  //       LOG.info("Sending size: " + size);
+  //       responseList.add(
+  //           streamWriter.append(
+  //               CreateProtoRows(
+  //                   new String[] {
+  //                     new String(new char[size]).replace('\u0000', (char) (r.nextInt(26) + 'a'))
+  //                   })));
+  //     }
+  //     for (int i = 0; i < 50; i++) {
+  //       assertFalse(responseList.get(i).get().hasError());
+  //     }
+  //     TableResult queryResult =
+  //         bigquery.query(
+  //             QueryJobConfiguration.newBuilder("SELECT count(*) from " + DATASET + '.' + tableName)
+  //                 .build());
+  //     Iterator<FieldValueList> queryIter = queryResult.getValues().iterator();
+  //     assertTrue(queryIter.hasNext());
+  //     assertEquals("50", queryIter.next().get(0).getStringValue());
+  //   }
+  // }
+
+  @Test
+  public void testDefaultRequestLimit()
+      throws IOException, InterruptedException, ExecutionException {
+    DatasetId datasetId =
+        DatasetId.of("bq-write-api-java-retry-test", RemoteBigQueryHelper.generateDatasetName());
+    DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetId).build();
+    bigquery.create(datasetInfo);
+    try {
+      String tableName = "requestTable";
+      TableId tableId = TableId.of(datasetId.getProject(), datasetId.getDataset(), tableName);
+      Field col1 = Field.newBuilder("col1", StandardSQLTypeName.STRING).build();
+      Schema originalSchema = Schema.of(col1);
+      TableInfo tableInfo =
+          TableInfo.newBuilder(tableId, StandardTableDefinition.of(originalSchema)).build();
+      bigquery.create(tableInfo);
+      TableName parent = TableName.of(datasetId.getProject(), datasetId.getDataset(), tableName);
+      try (StreamWriter streamWriter =
+          StreamWriter.newBuilder(parent.toString() + "/_default")
+              .setWriterSchema(CreateProtoSchemaWithColField())
+              .build()) {
+        ApiFuture<AppendRowsResponse> response =
+            streamWriter.append(
+                CreateProtoRows(
+                    new String[] {new String(new char[19 * 1024 * 1024]).replace("\0", "a")}));
+        try {
+          response.get();
+          Assert.fail("Large request should fail with InvalidArgumentError");
+        } catch (ExecutionException ex) {
+          assertEquals(io.grpc.StatusRuntimeException.class, ex.getCause().getClass());
+          io.grpc.StatusRuntimeException actualError =
+              (io.grpc.StatusRuntimeException) ex.getCause();
+          // This verifies that the Beam connector can consume this custom exception's grpc
+          // StatusCode.
+          assertEquals(Code.INVALID_ARGUMENT, actualError.getStatus().getCode());
+          assertThat(
+              actualError
                  .getStatus()
                  .getDescription()
                  .contains("AppendRows request too large: 19923131 limit 10485760"));
+        }
+      }
+    } finally {
+      RemoteBigQueryHelper.forceDelete(bigquery, datasetId.toString());
+    }
+  }
 }
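The expected message encodes the test's arithmetic: 19 * 1024 * 1024 = 19,922,944 string bytes plus a little protobuf framing yields the 19,923,131-byte request, rejected against the server default of 10 * 1024 * 1024 = 10,485,760 bytes (10 MiB, distinct from the client's decimal 20,000,000-byte cap) because the freshly created project is not allowlisted. Per the Beam-connector comment, downstream code is expected to branch on the gRPC status code rather than parse that message; a minimal consumer sketch under that assumption, with shouldSplitBatch as an illustrative name:

import com.google.api.core.ApiFuture;
import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
import io.grpc.Status.Code;
import io.grpc.StatusRuntimeException;
import java.util.concurrent.ExecutionException;

class AppendResultHandler {
  // Hypothetical downstream handler keyed off the status code the test pins down.
  static boolean shouldSplitBatch(ApiFuture<AppendRowsResponse> response)
      throws InterruptedException, ExecutionException {
    try {
      response.get();
      return false; // append succeeded, nothing to split
    } catch (ExecutionException ex) {
      if (ex.getCause() instanceof StatusRuntimeException) {
        StatusRuntimeException sre = (StatusRuntimeException) ex.getCause();
        // "AppendRows request too large: ... limit ..." surfaces as
        // INVALID_ARGUMENT, so the caller can retry with smaller batches.
        return sre.getStatus().getCode() == Code.INVALID_ARGUMENT;
      }
      throw ex;
    }
  }
}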
