Commit cf6eba6

R2-3497: Migrate R2 Auth Example to AWS S3 JS SDK v3 (#25077)
* R2-3497: migrate r2 auth example to aws s3 js sdk v3, other cleanup tasks
* rahul nits

---------

Co-authored-by: helloimalastair <[email protected]>
1 parent 6654dc8 commit cf6eba6

1 file changed

src/content/docs/r2/examples/authenticate-r2-auth-tokens.mdx

Lines changed: 108 additions & 153 deletions
@@ -3,130 +3,104 @@ title: Authenticate against R2 API using auth tokens
 pcx_content_type: Example
 ---
 
-import { Tabs, TabItem } from '~/components';
+import { PackageManagers, Tabs, TabItem } from '~/components';
 
 The following example shows how to authenticate against R2 using the S3 API and an API token.
 
 :::note
 For providing secure access to bucket objects for anonymous users, we recommend using [pre-signed URLs](/r2/api/s3/presigned-urls/) instead.
 
-Pre-signed URLs do not require users to be a member of your organization and enable programmatic application directly.
+Pre-signed URLs do not require users to be a member of your organization and enable direct programmatic access to R2.
 :::
 
-Ensure you have set the following environmental variables prior to running either example. Refer to [Get S3 API credentials from an API token](/r2/api/tokens/#get-s3-api-credentials-from-an-api-token) for more information.
+Ensure you have set the following environment variables prior to running either example. Refer to [Authentication](/r2/api/tokens/) for more information.
 
 ```sh
-export R2_ACCOUNT_ID=your_account_id
-export R2_ACCESS_KEY_ID=your_access_key_id
-export R2_SECRET_ACCESS_KEY=your_secret_access_key
-export R2_BUCKET_NAME=your_bucket_name
+export AWS_REGION=auto
+export AWS_ENDPOINT_URL=https://<account_id>.r2.cloudflarestorage.com
+export AWS_ACCESS_KEY_ID=your_access_key_id
+export AWS_SECRET_ACCESS_KEY=your_secret_access_key
 ```
 
 <Tabs>
 <TabItem label="JavaScript" icon="seti:javascript">
-Install the `aws-sdk` package for the S3 API:
+Install the `@aws-sdk/client-s3` package for the S3 API:
 
-```sh
-npm install aws-sdk
-```
+<PackageManagers pkg="@aws-sdk/client-s3" />
 
-```javascript
-const AWS = require('aws-sdk');
-
-const ACCOUNT_ID = process.env.R2_ACCOUNT_ID;
-const ACCESS_KEY_ID = process.env.R2_ACCESS_KEY_ID;
-const SECRET_ACCESS_KEY = process.env.R2_SECRET_ACCESS_KEY;
-const BUCKET_NAME = process.env.R2_BUCKET_NAME;
-
-// Configure the S3 client for Cloudflare R2
-const s3Client = new AWS.S3({
-  endpoint: `https://${ACCOUNT_ID}.r2.cloudflarestorage.com`,
-  accessKeyId: ACCESS_KEY_ID,
-  secretAccessKey: SECRET_ACCESS_KEY,
-  signatureVersion: 'v4',
-  region: 'auto' // Cloudflare R2 doesn't use regions, but this is required by the SDK
-});
-
-// Specify the object key
-const objectKey = '2024/08/02/ingested_0001.parquet';
-
-// Function to fetch the object
-async function fetchObject() {
-  try {
-    const params = {
-      Bucket: BUCKET_NAME,
-      Key: objectKey
-    };
-
-    const data = await s3Client.getObject(params).promise();
-    console.log('Successfully fetched the object');
-
-    // Process the data as needed
-    // For example, to get the content as a Buffer:
-    // const content = data.Body;
-
-    // Or to save the file (requires 'fs' module):
-    // const fs = require('fs').promises;
-    // await fs.writeFile('ingested_0001.parquet', data.Body);
-
-  } catch (error) {
-    console.error('Failed to fetch the object:', error);
-  }
-}
+Run the following Node.js script with `node index.js`. Ensure you change `Bucket` to the name of your bucket, and `Key` to point to an existing file in your R2 bucket.
+
+Note, tutorial below should function for TypeScript as well.
+
+```javascript title="index.js"
+import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3";
+
+const s3 = new S3Client();
 
-fetchObject();
+const Bucket = "<YOUR_BUCKET_NAME>";
+const Key = "pfp.jpg";
+
+const object = await s3.send(
+	new GetObjectCommand({
+		Bucket,
+		Key,
+	}),
+);
+
+console.log("Successfully fetched the object", object.$metadata);
+
+// Process the data as needed
+// For example, to get the content as a Buffer:
+// const content = data.Body;
+
+// Or to save the file (requires 'fs' module):
+// import { writeFile } from "node:fs/promises";
+// await writeFile('ingested_0001.parquet', data.Body);
 ```
-</TabItem>
-<TabItem label="Python" icon="seti:python">
+</TabItem>
+<TabItem label="Python" icon="seti:python">
 
 Install the `boto3` S3 API client:
 
 ```sh
 pip install boto3
 ```
 
-Run the following Python script with `python3 get_r2_object.py`. Ensure you change `object_key` to point to an existing file in your R2 bucket.
+Run the following Python script with `python3 get_r2_object.py`. Ensure you change `bucket` to the name of your bucket, and `object_key` to point to an existing file in your R2 bucket.
 
 ```python title="get_r2_object.py"
-import os
 import boto3
 from botocore.client import Config
 
-ACCOUNT_ID = os.environ.get('R2_ACCOUNT_ID')
-ACCESS_KEY_ID = os.environ.get('R2_ACCESS_KEY_ID')
-SECRET_ACCESS_KEY = os.environ.get('R2_SECRET_ACCESS_KEY')
-BUCKET_NAME = os.environ.get('R2_BUCKET_NAME')
-
 # Configure the S3 client for Cloudflare R2
 s3_client = boto3.client('s3',
-    endpoint_url=f'https://{ACCOUNT_ID}.r2.cloudflarestorage.com',
-    aws_access_key_id=ACCESS_KEY_ID,
-    aws_secret_access_key=SECRET_ACCESS_KEY,
-    config=Config(signature_version='s3v4')
+    config=Config(signature_version='s3v4')
 )
 
 # Specify the object key
+#
+bucket = '<YOUR_BUCKET_NAME>'
 object_key = '2024/08/02/ingested_0001.parquet'
 
 try:
-    # Fetch the object
-    response = s3_client.get_object(Bucket=BUCKET_NAME, Key=object_key)
+    # Fetch the object
+    response = s3_client.get_object(Bucket=bucket, Key=object_key)
 
-    print('Successfully fetched the object')
+    print('Successfully fetched the object')
 
-    # Process the response content as needed
-    # For example, to read the content:
-    # object_content = response['Body'].read()
+    # Process the response content as needed
+    # For example, to read the content:
+    # object_content = response['Body'].read()
 
-    # Or to save the file:
-    # with open('ingested_0001.parquet', 'wb') as f:
-    #     f.write(response['Body'].read())
+    # Or to save the file:
+    # with open('ingested_0001.parquet', 'wb') as f:
+    #     f.write(response['Body'].read())
 
 except Exception as e:
-    print(f'Failed to fetch the object. Error: {str(e)}')
-```
-</TabItem>
-<TabItem label="Go" icon="seti:go">
+    print(f'Failed to fetch the object. Error: {str(e)}')
+```
+</TabItem>
+<TabItem label="Go" icon="seti:go">
 
 Use `go get` to add the `aws-sdk-go-v2` packages to your Go project:
 
@@ -137,84 +111,65 @@ export R2_BUCKET_NAME=your_bucket_name
 go get github.com/aws/aws-sdk-go-v2/service/s3
 ```
 
-Run the following Go application as a script with `go run main.go`. Ensure you change `objectKey` to point to an existing file in your R2 bucket.
+Run the following Go application as a script with `go run main.go`. Ensure you change `bucket` to the name of your bucket, and `objectKey` to point to an existing file in your R2 bucket.
 
 ```go
-package main
+package main
 
 import (
-	"context"
-	"fmt"
-	"io"
-	"log"
-	"os"
-
-	"github.com/aws/aws-sdk-go-v2/aws"
-	"github.com/aws/aws-sdk-go-v2/config"
-	"github.com/aws/aws-sdk-go-v2/credentials"
-	"github.com/aws/aws-sdk-go-v2/service/s3"
+	"context"
+	"fmt"
+	"io"
+	"log"
+	"github.com/aws/aws-sdk-go-v2/aws"
+	"github.com/aws/aws-sdk-go-v2/config"
+	"github.com/aws/aws-sdk-go-v2/service/s3"
 )
 
 func main() {
-	// Load environment variables
-	accountID := os.Getenv("R2_ACCOUNT_ID")
-	accessKeyID := os.Getenv("R2_ACCESS_KEY_ID")
-	secretAccessKey := os.Getenv("R2_SECRET_ACCESS_KEY")
-	bucketName := os.Getenv("R2_BUCKET_NAME")
-
-	// Configure the S3 client for Cloudflare R2
-	r2Resolver := aws.EndpointResolverWithOptionsFunc(func(service, region string, options ...interface{}) (aws.Endpoint, error) {
-		return aws.Endpoint{
-			URL: fmt.Sprintf("https://%s.r2.cloudflarestorage.com", accountID),
-		}, nil
-	})
-
-	cfg, err := config.LoadDefaultConfig(context.TODO(),
-		config.WithEndpointResolverWithOptions(r2Resolver),
-		config.WithCredentialsProvider(credentials.NewStaticCredentialsProvider(accessKeyID, secretAccessKey, "")),
-		config.WithRegion("auto"), // Cloudflare R2 doesn't use regions, but this is required by the SDK
-	)
-	if err != nil {
-		log.Fatalf("Unable to load SDK config, %v", err)
-	}
-
-	// Create an S3 client
-	client := s3.NewFromConfig(cfg)
-
-	// Specify the object key
-	objectKey := "2024/08/02/ingested_0001.parquet"
-
-	// Fetch the object
-	output, err := client.GetObject(context.TODO(), &s3.GetObjectInput{
-		Bucket: aws.String(bucketName),
-		Key:    aws.String(objectKey),
-	})
-	if err != nil {
-		log.Fatalf("Unable to fetch object, %v", err)
-	}
-	defer output.Body.Close()
-
-	fmt.Println("Successfully fetched the object")
-
-	// Process the object content as needed
-	// For example, to save the file:
-	// file, err := os.Create("ingested_0001.parquet")
-	// if err != nil {
-	//     log.Fatalf("Unable to create file, %v", err)
-	// }
-	// defer file.Close()
-	// _, err = io.Copy(file, output.Body)
-	// if err != nil {
-	//     log.Fatalf("Unable to write file, %v", err)
-	// }
-
-	// Or to read the content:
-	content, err := io.ReadAll(output.Body)
-	if err != nil {
-		log.Fatalf("Unable to read object content, %v", err)
-	}
-	fmt.Printf("Object content length: %d bytes\n", len(content))
+	cfg, err := config.LoadDefaultConfig(context.TODO())
+	if err != nil {
+		log.Fatalf("Unable to load SDK config, %v", err)
+	}
+
+	// Create an S3 client
+	client := s3.NewFromConfig(cfg)
+
+	// Specify the object key
+	bucket := "<YOUR_BUCKET_NAME>"
+	objectKey := "pfp.jpg"
+
+	// Fetch the object
+	output, err := client.GetObject(context.TODO(), &s3.GetObjectInput{
+		Bucket: aws.String(bucket),
+		Key:    aws.String(objectKey),
+	})
+	if err != nil {
+		log.Fatalf("Unable to fetch object, %v", err)
+	}
+	defer output.Body.Close()
+
+	fmt.Println("Successfully fetched the object")
+
+	// Process the object content as needed
+	// For example, to save the file:
+	// file, err := os.Create("ingested_0001.parquet")
+	// if err != nil {
+	//     log.Fatalf("Unable to create file, %v", err)
+	// }
+	// defer file.Close()
+	// _, err = io.Copy(file, output.Body)
+	// if err != nil {
+	//     log.Fatalf("Unable to write file, %v", err)
+	// }
+
+	// Or to read the content:
+	content, err := io.ReadAll(output.Body)
+	if err != nil {
+		log.Fatalf("Unable to read object content, %v", err)
+	}
+	fmt.Printf("Object content length: %d bytes\n", len(content))
 }
 ```
-</TabItem>
-</Tabs>
+</TabItem>
+</Tabs>
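
One detail worth flagging in the migrated JavaScript example: the commented-out snippets still refer to `data.Body`, a leftover from the v2 code, while the v3 response variable is named `object` and its `Body` is a stream rather than a Buffer. A minimal sketch of how the body could be read under SDK v3 (not part of this commit; it reuses the placeholder bucket and key from the example and assumes `Body.transformToByteArray()`, which recent v3 releases provide):

```js
// Illustrative only: consuming the GetObject response body with @aws-sdk/client-s3 v3.
// Bucket and Key are the same placeholders used in the example above.
import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3";
import { writeFile } from "node:fs/promises";

const s3 = new S3Client();

const object = await s3.send(
	new GetObjectCommand({
		Bucket: "<YOUR_BUCKET_NAME>",
		Key: "pfp.jpg",
	}),
);

if (!object.Body) {
	throw new Error("Response had no body");
}

// In v3, Body is a stream; transformToByteArray() buffers it into a Uint8Array.
const bytes = await object.Body.transformToByteArray();
await writeFile("pfp.jpg", bytes);
console.log(`Wrote ${bytes.length} bytes`);
```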
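
The rewritten examples also lean on the standard AWS environment variables (`AWS_ENDPOINT_URL`, `AWS_ACCESS_KEY_ID`, and so on) being picked up by the SDK's default configuration chain. If you would rather not depend on that, the v3 client accepts the same settings directly in its constructor. A minimal sketch under that assumption; the `R2_*` variable names below are illustrative, not something the SDK requires:

```js
// Illustrative only: configuring the v3 client explicitly instead of via AWS_* variables.
// The R2_* environment variable names are hypothetical.
import { S3Client } from "@aws-sdk/client-s3";

const s3 = new S3Client({
	region: "auto",
	endpoint: `https://${process.env.R2_ACCOUNT_ID}.r2.cloudflarestorage.com`,
	credentials: {
		accessKeyId: process.env.R2_ACCESS_KEY_ID,
		secretAccessKey: process.env.R2_SECRET_ACCESS_KEY,
	},
});
```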
