@@ -39,6 +39,8 @@
 import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
 import com.google.common.collect.ImmutableList;
 import com.google.protobuf.ByteString;
+import com.google.protobuf.DescriptorProtos.DescriptorProto;
+import com.google.protobuf.DescriptorProtos.FieldDescriptorProto;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Descriptors.DescriptorValidationException;
 import io.grpc.Status;
@@ -208,6 +210,21 @@ ProtoRows CreateProtoRows(String[] messages) {
     return rows.build();
   }
 
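+  // Builds a minimal ProtoSchema with a single STRING field "col1", matching the
+  // single-column test tables used by the default-stream tests below.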
+  ProtoSchema CreateProtoSchemaWithColField() {
+    return ProtoSchema.newBuilder()
+        .setProtoDescriptor(
+            DescriptorProto.newBuilder()
+                .setName("testProto")
+                .addField(
+                    FieldDescriptorProto.newBuilder()
+                        .setName("col1")
+                        .setNumber(1)
+                        .setType(FieldDescriptorProto.Type.TYPE_STRING)
+                        .build())
+                .build())
+        .build();
+  }
+
   ProtoRows CreateProtoOptionalRows(String[] messages) {
     ProtoRows.Builder rows = ProtoRows.newBuilder();
     for (String message : messages) {
@@ -1541,4 +1558,94 @@ public void testMultiplexingMixedLocation()
     assertEquals("us", streamWriter2.getLocation());
     assertEquals("eu", streamWriter3.getLocation());
   }
+
+  // Tested locally, but the project config is frozen; we need to wait a while before
+  // this test can be enabled in the automated workflow.
+  // @Test
+  // public void testLargeRequest() throws IOException, InterruptedException, ExecutionException {
+  //   String tableName = "largeRequestTable";
+  //   TableId tableId = TableId.of(DATASET, tableName);
+  //   Field col1 = Field.newBuilder("col1", StandardSQLTypeName.STRING).build();
+  //   Schema originalSchema = Schema.of(col1);
+  //   TableInfo tableInfo =
+  //       TableInfo.newBuilder(tableId, StandardTableDefinition.of(originalSchema)).build();
+  //   bigquery.create(tableInfo);
+  //   TableName parent = TableName.of(ServiceOptions.getDefaultProjectId(), DATASET, tableName);
+  //   try (StreamWriter streamWriter =
+  //       StreamWriter.newBuilder(parent.toString() + "/_default")
+  //           .setWriterSchema(CreateProtoSchemaWithColField())
+  //           .build()) {
+  //     // Randomly mix 15 MiB and 1 KiB appends to exercise large-request handling.
+  //     List<Integer> sizeSet = Arrays.asList(15 * 1024 * 1024, 1024);
+  //     List<ApiFuture<AppendRowsResponse>> responseList =
+  //         new ArrayList<ApiFuture<AppendRowsResponse>>();
+  //     Random r = new Random();
+  //     for (int i = 0; i < 50; i++) {
+  //       int size = sizeSet.get(r.nextInt(2));
+  //       LOG.info("Sending size: " + size);
+  //       responseList.add(
+  //           streamWriter.append(
+  //               CreateProtoRows(
+  //                   new String[] {
+  //                     new String(new char[size]).replace('\u0000', (char) (r.nextInt(26) + 'a'))
+  //                   })));
+  //     }
+  //     for (int i = 0; i < 50; i++) {
+  //       assertFalse(responseList.get(i).get().hasError());
+  //     }
+  //     TableResult queryResult =
+  //         bigquery.query(
+  //             QueryJobConfiguration.newBuilder(
+  //                     "SELECT count(*) from " + DATASET + '.' + tableName)
+  //                 .build());
+  //     Iterator<FieldValueList> queryIter = queryResult.getValues().iterator();
+  //     assertTrue(queryIter.hasNext());
+  //     assertEquals("50", queryIter.next().get(0).getStringValue());
+  //   }
+  // }
+
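+  // Verifies that a single append over the default AppendRows request size limit is
+  // rejected with INVALID_ARGUMENT, surfaced as a StatusRuntimeException whose status
+  // code downstream consumers (e.g. the Beam connector) can inspect.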
+  @Test
+  public void testDefaultRequestLimit()
+      throws IOException, InterruptedException, ExecutionException {
+    DatasetId datasetId =
+        DatasetId.of("bq-write-api-java-retry-test", RemoteBigQueryHelper.generateDatasetName());
+    DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetId).build();
+    bigquery.create(datasetInfo);
+    try {
+      String tableName = "requestTable";
+      TableId tableId = TableId.of(datasetId.getProject(), datasetId.getDataset(), tableName);
+      Field col1 = Field.newBuilder("col1", StandardSQLTypeName.STRING).build();
+      Schema originalSchema = Schema.of(col1);
+      TableInfo tableInfo =
+          TableInfo.newBuilder(tableId, StandardTableDefinition.of(originalSchema)).build();
+      bigquery.create(tableInfo);
+      TableName parent = TableName.of(datasetId.getProject(), datasetId.getDataset(), tableName);
+      try (StreamWriter streamWriter =
+          StreamWriter.newBuilder(parent.toString() + "/_default")
+              .setWriterSchema(CreateProtoSchemaWithColField())
+              .build()) {
+        ApiFuture<AppendRowsResponse> response =
+            streamWriter.append(
+                CreateProtoRows(
+                    new String[] {new String(new char[19 * 1024 * 1024]).replace("\0", "a")}));
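+        // 19 MiB of row data serializes to 19923131 bytes on the wire, well over the
+        // default 10 MiB (10485760-byte) request limit, so the append must fail.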
+        try {
+          response.get();
+          Assert.fail("Large request should fail with InvalidArgumentError");
+        } catch (ExecutionException ex) {
+          assertEquals(io.grpc.StatusRuntimeException.class, ex.getCause().getClass());
+          io.grpc.StatusRuntimeException actualError =
+              (io.grpc.StatusRuntimeException) ex.getCause();
+          // This verifies that the Beam connector can consume this custom exception's gRPC
+          // StatusCode.
+          assertEquals(Code.INVALID_ARGUMENT, actualError.getStatus().getCode());
+          assertThat(actualError.getStatus().getDescription())
+              .contains("AppendRows request too large: 19923131 limit 10485760");
+        }
+      }
+    } finally {
+      RemoteBigQueryHelper.forceDelete(bigquery, datasetId.toString());
+    }
+  }
 }