Commit 6f2a3df9 by Haohao Jiang

Merge branch 'feat-upload-file-to-s3' into test-detect-master-if-publish

parents fdb18f6a 8781869c
...@@ -173,3 +173,5 @@ dist ...@@ -173,3 +173,5 @@ dist
# Finder (MacOS) folder config # Finder (MacOS) folder config
.DS_Store .DS_Store
.node-version
\ No newline at end of file
# falcon # falcon
<!-- Remote Repository URL -->
[Repository URL](https://cd.i.strikingly.com/walter.huang/Falcon)
To install dependencies: To install dependencies:
```bash ```bash
...@@ -13,3 +16,26 @@ bun run index.ts ...@@ -13,3 +16,26 @@ bun run index.ts
``` ```
This project was created using `bun init` in bun v1.1.18. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime. This project was created using `bun init` in bun v1.1.18. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.
## AWS S3 配置
如需使用 S3 文件上传功能,需要设置以下环境变量:
```bash
AWS_REGION=ap-northeast-1
AWS_ACCESS_KEY_ID=your_access_key_id
AWS_SECRET_ACCESS_KEY=your_secret_key
AWS_S3_BUCKET_NAME=your_bucket_name
```
### 使用示例
```typescript
import { uploadFileFromEnv } from './src/clients/s3';
const key = await uploadFileFromEnv(
'/path/to/local/file.pdf',
'uploads/file.pdf',
{ contentType: 'application/pdf', acl: 'private' }
);
```
docker build -t reg.i.strikingly.com/falcon:v0.7.3-uat . docker build -t reg.i.strikingly.com/falcon:v0.8.1-uat .
docker push reg.i.strikingly.com/falcon:v0.7.3-uat docker push reg.i.strikingly.com/falcon:v0.8.1-uat
...@@ -9,6 +9,7 @@ ...@@ -9,6 +9,7 @@
"typescript": "^5.0.0" "typescript": "^5.0.0"
}, },
"dependencies": { "dependencies": {
"@aws-sdk/client-s3": "^3.709.0",
"@slack/web-api": "^7.9.3", "@slack/web-api": "^7.9.3",
"deep-diff": "^1.0.2", "deep-diff": "^1.0.2",
"googleapis": "^150.0.1", "googleapis": "^150.0.1",
......
/**
 * AWS S3 file-upload client.
 *
 * Usage:
 * ```typescript
 * import { uploadFileFromEnv } from './clients/s3';
 *
 * // Upload a file (configuration is read from environment variables)
 * const key = await uploadFileFromEnv(
 *   '/path/to/file.pdf',
 *   'uploads/file.pdf',
 *   { contentType: 'application/pdf', acl: 'private' }
 * );
 * ```
 */
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
import { createReadStream, statSync } from 'fs';
import { basename } from 'path';
/**
 * S3 Configuration: static credentials plus the target bucket.
 *
 * Populated manually or via {@link createS3ConfigFromEnv}.
 */
export interface S3Config {
  /** AWS region, e.g. 'ap-northeast-1'. */
  region: string;
  /** AWS access key ID. */
  accessKeyId: string;
  /** AWS secret access key. */
  secretAccessKey: string;
  /** Name of the destination S3 bucket. */
  bucketName: string;
}
/**
 * Optional per-object upload settings for {@link uploadFile}.
 */
export interface UploadOptions {
  /** MIME type to store as the object's Content-Type; omitted when unset. */
  contentType?: string;
  /** Canned ACL applied to the object; omitted when unset. */
  acl?: 'private' | 'public-read';
}
/**
 * Build an S3 client for the given configuration.
 *
 * A fresh client instance is created on every call; the caller owns
 * its lifecycle.
 */
function createS3Client(config: S3Config): S3Client {
  const { region, accessKeyId, secretAccessKey } = config;
  return new S3Client({
    region,
    credentials: { accessKeyId, secretAccessKey },
  });
}
/**
 * Upload a local file to S3.
 *
 * Streams the file from disk rather than buffering it in memory, and
 * always releases the client's underlying connections when done.
 *
 * @param filePath Local path to the file
 * @param key S3 object key (path within bucket)
 * @param config S3 configuration
 * @param options Upload options (content type, canned ACL)
 * @returns The S3 object key
 * @throws Error if the file cannot be read, or if S3 responds with a non-200 status
 */
export async function uploadFile(
  filePath: string,
  key: string,
  config: S3Config,
  options?: UploadOptions
): Promise<string> {
  const client = createS3Client(config);
  try {
    // statSync throws before any network work if the file is missing/unreadable.
    const fileStat = statSync(filePath);
    // Inline, fully typed params object — the original used `any`, which
    // disabled type checking on the SDK call. Optional fields are spread in
    // only when present, matching the original conditional assignment.
    const command = new PutObjectCommand({
      Bucket: config.bucketName,
      Key: key,
      Body: createReadStream(filePath),
      ContentLength: fileStat.size,
      ...(options?.contentType ? { ContentType: options.contentType } : {}),
      ...(options?.acl ? { ACL: options.acl } : {}),
    });
    const response = await client.send(command);
    if (response.$metadata.httpStatusCode !== 200) {
      throw new Error(`S3 upload failed: ${response.$metadata.httpStatusCode}`);
    }
    return key;
  } finally {
    // Each call creates its own client; destroy it so sockets aren't leaked.
    client.destroy();
  }
}
/**
 * Upload a file to S3, resolving configuration from environment variables.
 *
 * Convenience wrapper around {@link uploadFile} + {@link createS3ConfigFromEnv}.
 *
 * @param filePath Local path to the file
 * @param key S3 object key (path within bucket)
 * @param options Upload options
 * @returns The S3 object key
 */
export async function uploadFileFromEnv(
  filePath: string,
  key: string,
  options?: UploadOptions
): Promise<string> {
  const config = createS3ConfigFromEnv();
  return uploadFile(filePath, key, config, options);
}
/**
 * Create an S3 config from environment variables.
 *
 * Required variables:
 * - AWS_REGION: AWS region (e.g., ap-northeast-1)
 * - AWS_ACCESS_KEY_ID: AWS access key ID
 * - AWS_SECRET_ACCESS_KEY: AWS secret access key
 * - AWS_S3_BUCKET_NAME: S3 bucket name
 *
 * @returns A fully populated {@link S3Config}
 * @throws Error naming exactly the variables that are missing, so the
 *   operator can fix all of them in one pass
 */
export function createS3ConfigFromEnv(): S3Config {
  const region = process.env.AWS_REGION;
  const accessKeyId = process.env.AWS_ACCESS_KEY_ID;
  const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
  const bucketName = process.env.AWS_S3_BUCKET_NAME;
  if (!region || !accessKeyId || !secretAccessKey || !bucketName) {
    // Report only the variables that are actually unset, not the whole list.
    const missing = [
      !region && 'AWS_REGION',
      !accessKeyId && 'AWS_ACCESS_KEY_ID',
      !secretAccessKey && 'AWS_SECRET_ACCESS_KEY',
      !bucketName && 'AWS_S3_BUCKET_NAME',
    ].filter(Boolean);
    throw new Error(
      `Missing required S3 environment variables: ${missing.join(', ')}`
    );
  }
  return {
    region,
    accessKeyId,
    secretAccessKey,
    bucketName,
  };
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment