Skip to content

Commit 2f2211f

Browse files
Merge branch 'release-1.40.24' into develop
* release-1.40.24: Bumping version to 1.40.24 Update endpoints model Update to latest models
2 parents eaa97a3 + 90396a7 commit 2f2211f

File tree

11 files changed

+528
-301
lines changed

11 files changed

+528
-301
lines changed

.changes/1.40.24.json

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
[
2+
{
3+
"category": "``cleanrooms``",
4+
"description": "Add support for configurable compute sizes for PySpark jobs.",
5+
"type": "api-change"
6+
},
7+
{
8+
"category": "``cloudformation``",
9+
"description": "ListHookResults API now supports retrieving invocation results for all CloudFormation Hooks (previously limited to create change set and Cloud Control operations) with new optional parameters for filtering by Hook status and ARN.",
10+
"type": "api-change"
11+
},
12+
{
13+
"category": "``ec2``",
14+
"description": "Add m8i, m8i-flex and i8ge instance types.",
15+
"type": "api-change"
16+
},
17+
{
18+
"category": "``opensearchserverless``",
19+
"description": "Add support for Federal Information Processing Standards (FIPS) and Federal Risk and Authorization Management Program (FedRAMP) compliance",
20+
"type": "api-change"
21+
},
22+
{
23+
"category": "``rds``",
24+
"description": "Added new EndpointNetworkType and TargetConnectionNetworkType fields in Proxy APIs to support IPv6",
25+
"type": "api-change"
26+
},
27+
{
28+
"category": "``verifiedpermissions``",
29+
"description": "Amazon Verified Permissions / Features: Adds support for datetime and duration attribute values.",
30+
"type": "api-change"
31+
}
32+
]

CHANGELOG.rst

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,17 @@
22
CHANGELOG
33
=========
44

5+
1.40.24
6+
=======
7+
8+
* api-change:``cleanrooms``: Add support for configurable compute sizes for PySpark jobs.
9+
* api-change:``cloudformation``: ListHookResults API now supports retrieving invocation results for all CloudFormation Hooks (previously limited to create change set and Cloud Control operations) with new optional parameters for filtering by Hook status and ARN.
10+
* api-change:``ec2``: Add m8i, m8i-flex and i8ge instance types.
11+
* api-change:``opensearchserverless``: Add support for Federal Information Processing Standards (FIPS) and Federal Risk and Authorization Management Program (FedRAMP) compliance
12+
* api-change:``rds``: Added new EndpointNetworkType and TargetConnectionNetworkType fields in Proxy APIs to support IPv6
13+
* api-change:``verifiedpermissions``: Amazon Verified Permissions / Features: Adds support for datetime and duration attribute values.
14+
15+
516
1.40.23
617
=======
718

botocore/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
import re
1818
from logging import NullHandler
1919

20-
__version__ = '1.40.23'
20+
__version__ = '1.40.24'
2121

2222

2323
# Configure default logger to do nothing

botocore/data/cleanrooms/2022-02-17/service-2.json

Lines changed: 51 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8604,6 +8604,10 @@
86048604
"error":{
86058605
"shape":"ProtectedJobError",
86068606
"documentation":"<p> The error from the protected job.</p>"
8607+
},
8608+
"computeConfiguration":{
8609+
"shape":"ProtectedJobComputeConfiguration",
8610+
"documentation":"<p>The compute configuration for the protected job.</p>"
86078611
}
86088612
},
86098613
"documentation":"<p>The parameters for a Clean Rooms protected job.</p>"
@@ -8612,6 +8616,17 @@
86128616
"type":"string",
86138617
"enum":["DIRECT_ANALYSIS"]
86148618
},
8619+
"ProtectedJobComputeConfiguration":{
8620+
"type":"structure",
8621+
"members":{
8622+
"worker":{
8623+
"shape":"ProtectedJobWorkerComputeConfiguration",
8624+
"documentation":"<p>The worker configuration for the compute environment.</p>"
8625+
}
8626+
},
8627+
"documentation":"<p>The configuration of the compute resources for a PySpark job.</p>",
8628+
"union":true
8629+
},
86158630
"ProtectedJobConfigurationDetails":{
86168631
"type":"structure",
86178632
"members":{
@@ -8925,6 +8940,37 @@
89258940
"type":"string",
89268941
"enum":["PYSPARK"]
89278942
},
8943+
"ProtectedJobWorkerComputeConfiguration":{
8944+
"type":"structure",
8945+
"required":[
8946+
"type",
8947+
"number"
8948+
],
8949+
"members":{
8950+
"type":{
8951+
"shape":"ProtectedJobWorkerComputeType",
8952+
"documentation":"<p>The worker compute configuration type.</p>"
8953+
},
8954+
"number":{
8955+
"shape":"ProtectedJobWorkerComputeConfigurationNumberInteger",
8956+
"documentation":"<p>The number of workers for a PySpark job.</p>"
8957+
}
8958+
},
8959+
"documentation":"<p>The configuration of the compute resources for a PySpark job.</p>"
8960+
},
8961+
"ProtectedJobWorkerComputeConfigurationNumberInteger":{
8962+
"type":"integer",
8963+
"box":true,
8964+
"max":128,
8965+
"min":4
8966+
},
8967+
"ProtectedJobWorkerComputeType":{
8968+
"type":"string",
8969+
"enum":[
8970+
"CR.1X",
8971+
"CR.4X"
8972+
]
8973+
},
89288974
"ProtectedQuery":{
89298975
"type":"structure",
89308976
"required":[
@@ -9901,6 +9947,10 @@
99019947
"resultConfiguration":{
99029948
"shape":"ProtectedJobResultConfigurationInput",
99039949
"documentation":"<p>The details needed to write the job results.</p>"
9950+
},
9951+
"computeConfiguration":{
9952+
"shape":"ProtectedJobComputeConfiguration",
9953+
"documentation":"<p>The compute configuration for the protected job.</p>"
99049954
}
99059955
}
99069956
},
@@ -10655,7 +10705,7 @@
1065510705
},
1065610706
"number":{
1065710707
"shape":"WorkerComputeConfigurationNumberInteger",
10658-
"documentation":"<p> The number of workers.</p>"
10708+
"documentation":"<p> The number of workers.</p> <p>SQL queries support a minimum value of 2 and a maximum value of 400. </p> <p>PySpark jobs support a minimum value of 4 and a maximum value of 128.</p>"
1065910709
}
1066010710
},
1066110711
"documentation":"<p> The configuration of the compute resources for workers running an analysis with the Clean Rooms SQL analytics engine.</p>"

0 commit comments

Comments
 (0)