Skip to content

Commit 28fbc15

Browse files
committed
refactor to add feature flag for s3 export approach
1 parent 1a88d37 commit 28fbc15

File tree

12 files changed

+765
-215
lines changed

12 files changed

+765
-215
lines changed

athena-snowflake/athena-snowflake-connection.yaml

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,13 @@ Parameters:
4242
Description: "(Optional) A custom role to be used by the Connector lambda"
4343
Type: String
4444
Default: ""
45+
EnableS3Export:
46+
Description: '(Optional) Enable S3 export functionality for data transfer. Set to true to use S3 export path, false for direct query path.'
47+
Type: String
48+
Default: 'false'
49+
AllowedValues:
50+
- 'true'
51+
- 'false'
4552

4653
Conditions:
4754
HasSecurityGroups: !Not [ !Equals [ !Join ["", !Ref SecurityGroupIds], "" ] ]
@@ -59,6 +66,7 @@ Resources:
5966
Environment:
6067
Variables:
6168
glue_connection: !Ref GlueConnection
69+
SNOWFLAKE_ENABLE_S3_EXPORT: !Ref EnableS3Export
6270
FunctionName: !Ref LambdaFunctionName
6371
PackageType: "Image"
6472
ImageUri: !Sub
@@ -87,7 +95,6 @@ Resources:
8795
Principal:
8896
Service:
8997
- lambda.amazonaws.com
90-
AWS: '*'
9198
Action:
9299
- "sts:AssumeRole"
93100

athena-snowflake/athena-snowflake.yaml

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,13 @@ Parameters:
5858
Description: "(Optional) A custom role to be used by the Connector lambda"
5959
Type: String
6060
Default: ""
61+
EnableS3Export:
62+
Description: '(Optional) Enable S3 export functionality for data transfer. Set to true to use S3 export path, false for direct query path.'
63+
Type: String
64+
Default: 'false'
65+
AllowedValues:
66+
- 'true'
67+
- 'false'
6168
Conditions:
6269
HasPermissionsBoundary: !Not [ !Equals [ !Ref PermissionsBoundaryARN, "" ] ]
6370
HasSecurityGroups: !Not [ !Equals [ !Join ["", !Ref SecurityGroupIds], "" ] ]
@@ -75,13 +82,14 @@ Resources:
7582
spill_bucket: !Ref SpillBucket
7683
spill_prefix: !Ref SpillPrefix
7784
default: !Ref DefaultConnectionString
85+
SNOWFLAKE_ENABLE_S3_EXPORT: !Ref EnableS3Export
7886
FunctionName: !Ref LambdaFunctionName
7987
PackageType: "Image"
8088
ImageUri: !Sub
8189
- '${Account}.dkr.ecr.${AWS::Region}.amazonaws.com/athena-federation-repository-snowflake:2022.47.1'
8290
- Account: !If [IsRegionBAH, 084828588479, !If [IsRegionHKG, 183295418215, 292517598671]]
8391
ImageConfig:
84-
Command: [ "com.amazonaws.athena.connectors.snowflake.SnowflakeCompositeHandler" ]
92+
Command: [ "com.amazonaws.athena.connectors.snowflake.SnowflakeMuxCompositeHandler" ]
8593
Description: "Enables Amazon Athena to communicate with Snowflake using JDBC"
8694
Timeout: !Ref LambdaTimeout
8795
MemorySize: !Ref LambdaMemory
@@ -104,7 +112,6 @@ Resources:
104112
Principal:
105113
Service:
106114
- lambda.amazonaws.com
107-
AWS: '*'
108115
Action:
109116
- "sts:AssumeRole"
110117

@@ -137,6 +144,13 @@ Resources:
137144
- athena:GetQueryExecution
138145
Effect: Allow
139146
Resource: '*'
147+
- Action:
148+
- ec2:CreateNetworkInterface
149+
- ec2:DeleteNetworkInterface
150+
- ec2:DescribeNetworkInterfaces
151+
- ec2:DetachNetworkInterface
152+
Effect: Allow
153+
Resource: '*'
140154
#S3CrudPolicy allows our connector to spill large responses to S3. You can optionally replace this pre-made policy
141155
#with one that is more restrictive and can only 'put' but not read,delete, or overwrite files.
142156
- Action:

athena-snowflake/src/main/java/com/amazonaws/athena/connectors/snowflake/SnowflakeCompositeHandler.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ public class SnowflakeCompositeHandler
4242
{
4343
public SnowflakeCompositeHandler() throws CertificateEncodingException, IOException, NoSuchAlgorithmException, KeyStoreException
4444
{
45-
super(new SnowflakeMetadataHandler(new SnowflakeEnvironmentProperties().createEnvironment()), new SnowflakeRecordHandler(new SnowflakeEnvironmentProperties().createEnvironment()));
45+
super(new SnowflakeMetadataHandler(new SnowflakeEnvironmentProperties(System.getenv()).createEnvironment()), new SnowflakeRecordHandler(new SnowflakeEnvironmentProperties(System.getenv()).createEnvironment()));
4646
installCaCertificate();
4747
setupNativeEnvironmentVariables();
4848
}

athena-snowflake/src/main/java/com/amazonaws/athena/connectors/snowflake/SnowflakeConstants.java

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,9 +25,15 @@ public final class SnowflakeConstants
2525
public static final String SNOWFLAKE_NAME = "snowflake";
2626
public static final String SNOWFLAKE_DRIVER_CLASS = "com.snowflake.client.jdbc.SnowflakeDriver";
2727
public static final int SNOWFLAKE_DEFAULT_PORT = 1025;
28+
/**
29+
* This constant limits the number of partitions. The default is set to 50. A large number may cause a timeout issue.
30+
* We arrived at this number after performance testing with datasets of different sizes
31+
*/
32+
public static final int MAX_PARTITION_COUNT = 50;
2833
/**
2934
* This constant limits the number of records to be returned in a single split.
3035
*/
36+
public static final int SINGLE_SPLIT_LIMIT_COUNT = 10000;
3137
public static final String SNOWFLAKE_QUOTE_CHARACTER = "\"";
3238
/**
3339
* A ssl file location constant to store the SSL certificate

athena-snowflake/src/main/java/com/amazonaws/athena/connectors/snowflake/SnowflakeEnvironmentProperties.java

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,14 @@ public class SnowflakeEnvironmentProperties extends JdbcEnvironmentProperties
4343
private static final String DB_PROPERTY_KEY = "db";
4444
private static final String SCHEMA_PROPERTY_KEY = "schema";
4545
private static final String SNOWFLAKE_ESCAPE_CHARACTER = "\"";
46+
public static final String ENABLE_S3_EXPORT = "SNOWFLAKE_ENABLE_S3_EXPORT";
47+
48+
private final boolean enableS3Export;
49+
50+
public SnowflakeEnvironmentProperties(Map<String, String> properties)
51+
{
52+
this.enableS3Export = Boolean.parseBoolean(properties.getOrDefault(ENABLE_S3_EXPORT, "false"));
53+
}
4654

4755
@Override
4856
public Map<String, String> connectionPropertiesToEnvironment(Map<String, String> connectionProperties)
@@ -134,4 +142,9 @@ public static Map<String, String> getSnowFlakeParameter(Map<String, String> base
134142

135143
return parameters;
136144
}
145+
146+
public boolean isS3ExportEnabled()
147+
{
148+
return enableS3Export;
149+
}
137150
}

0 commit comments

Comments (0)