diff --git a/.eslintrc b/.eslintrc index c799fe532..9bcdb4688 100644 --- a/.eslintrc +++ b/.eslintrc @@ -1,3 +1,6 @@ { - "extends": "eslint-config-egg" + "extends": [ + "eslint-config-egg/typescript", + "eslint-config-egg/lib/rules/enforce-node-prefix" + ] } diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml deleted file mode 100644 index efb34a099..000000000 --- a/.github/workflows/codeql.yml +++ /dev/null @@ -1,72 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -# -# ******** NOTE ******** -# We have attempted to detect the languages in your repository. Please check -# the `language` matrix defined below to confirm you have the correct set of -# supported CodeQL languages. -# -name: "CodeQL" - -on: - push: - branches: [ "master" ] - pull_request: - # The branches below must be a subset of the branches above - branches: [ "master" ] - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - matrix: - language: [ 'javascript' ] - # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] - # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. 
- - # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs - # queries: security-extended,security-and-quality - - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v2 - - # ℹ️ Command-line programs to run using the OS shell. - # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - - # If the Autobuild fails above, remove it and uncomment the following three lines. - # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. - - # - run: | - # echo "Run, Build Application using script" - # ./location_of_script_within_repo/buildscript.sh - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 - with: - category: "/language:${{matrix.language}}" diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml index 5bb62c67d..19fb5717b 100644 --- a/.github/workflows/nodejs.yml +++ b/.github/workflows/nodejs.yml @@ -6,14 +6,10 @@ name: Node.js CI on: push: branches: - - main - master - - 1.x pull_request: branches: - - main - master - - 1.x jobs: build: @@ -22,15 +18,15 @@ jobs: strategy: fail-fast: false matrix: - node-version: [14.18.0, 14, 16, 18, 20] + node-version: [16, 18, 20] os: [ubuntu-latest] steps: - name: Checkout Git Source - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v1 + uses: actions/setup-node@v3 with: node-version: ${{ matrix.node-version }} @@ -52,6 +48,6 @@ jobs: ALI_SDK_STS_ENDPOINT: ${{ secrets.ALI_SDK_STS_ENDPOINT }} - name: Code Coverage - uses: codecov/codecov-action@v1 + uses: 
codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 76cbd450b..a4e1158fa 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,7 +2,7 @@ name: Release on: push: - branches: [ master, 1.x ] + branches: [ master ] jobs: release: diff --git a/.gitignore b/.gitignore index 8dae580ce..7bfa6aa31 100644 --- a/.gitignore +++ b/.gitignore @@ -42,3 +42,6 @@ package-lock.json es .eslintcache + +.tshy*/ +dist/ diff --git a/AUTHORS b/AUTHORS deleted file mode 100644 index 6920f15cb..000000000 --- a/AUTHORS +++ /dev/null @@ -1,12 +0,0 @@ -# Ordered by date of first contribution. -# Auto-generated by 'contributors' on Wed, 01 Apr 2015 16:09:04 GMT. -# https://github.com/xingrz/node-contributors - -PeterRao (https://github.com/PeterRao) -dead_horse -chunpu -fengmk2 (https://github.com/fengmk2) -Yan Qing (https://github.com/zensh) -mars-coder (https://github.com/mars-coder) -Jacky Tang (https://github.com/jackytck) - diff --git a/CHANGELOG.md b/CHANGELOG.md index 8166229f9..ed967ec30 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,3 +27,57 @@ ### Bug Fixes * auto release on action ([#8](https://github.com/node-modules/oss-client/issues/8)) ([e5bfe04](https://github.com/node-modules/oss-client/commit/e5bfe042163951d709c8197c136be7e9e6b9e89b)) + +--- + + +1.2.2 / 2022-12-09 +================== + +**fixes** + * [[`fc2fb8f`](http://github.com/node-modules/oss-client/commit/fc2fb8f9d1b23d355cc8cf12f46d1df6182c6f6f)] - 🐛 FIX: try to use result code first (fengmk2 <>) + +1.2.1 / 2022-12-04 +================== + +**fixes** + * [[`cc2cc06`](http://github.com/node-modules/oss-client/commit/cc2cc065ede44d5d40120b4877dfa85e25cd0199)] - 🐛 FIX: object.list type define (#7) (fengmk2 <>) + +1.2.0 / 2022-12-04 +================== + +**features** + * 
[[`a9ad395`](http://github.com/node-modules/oss-client/commit/a9ad39539889f083e0d7671ca19ecc1b263eed74)] - 📦 NEW: Try to use ctx.httpclient first (#6) (fengmk2 <>) + +1.1.1 / 2022-12-04 +================== + +**fixes** + * [[`38cebaa`](http://github.com/node-modules/oss-client/commit/38cebaa9f5868d67530cc34ef3cdb4023b9d3a1f)] - 🐛 FIX: Should add oss-interface to dependencies (#5) (fengmk2 <>) + +**others** + * [[`301b0a2`](http://github.com/node-modules/oss-client/commit/301b0a2a3fa9af2ce85e093747f59c8f677dded1)] - 🤖 TEST: Test enable parallel (#4) (fengmk2 <>) + +1.1.0 / 2022-10-27 +================== + +**features** + * [[`79b6302`](http://github.com/node-modules/oss-client/commit/79b6302b77bfabfc2750a5c5d48b4059cb04ac78)] - 📦 NEW: Add d.ts and IObjectSimple Client define (#3) (fengmk2 <>) + +**others** + * [[`8d9e935`](http://github.com/node-modules/oss-client/commit/8d9e935ee530ebd9477e6334991465ff59a75b4b)] - 📖 DOC: Remove browser document content (fengmk2 <>) + +1.0.1 / 2022-10-23 +================== + +**fixes** + * [[`e7b229f`](http://github.com/node-modules/oss-client/commit/e7b229f839925ff7a8069834b73fe34789e5e00f)] - 🐛 FIX: ClusterClient use class style (fengmk2 <>) + +1.0.0 / 2022-10-23 +================== + +**features** + * [[`fe3e2c1`](http://github.com/node-modules/oss-client/commit/fe3e2c1a119ffd3b8a8c77ab6b38ee545c14fb59)] - 👌 IMPROVE: Remove unuse ts files (#2) (fengmk2 <>),fatal: No names found, cannot describe anything. 
+ +**others** + diff --git a/History.md b/History.md deleted file mode 100644 index e5fd8507a..000000000 --- a/History.md +++ /dev/null @@ -1,51 +0,0 @@ - -1.2.2 / 2022-12-09 -================== - -**fixes** - * [[`fc2fb8f`](http://github.com/node-modules/oss-client/commit/fc2fb8f9d1b23d355cc8cf12f46d1df6182c6f6f)] - 🐛 FIX: try to use result code first (fengmk2 <>) - -1.2.1 / 2022-12-04 -================== - -**fixes** - * [[`cc2cc06`](http://github.com/node-modules/oss-client/commit/cc2cc065ede44d5d40120b4877dfa85e25cd0199)] - 🐛 FIX: object.list type define (#7) (fengmk2 <>) - -1.2.0 / 2022-12-04 -================== - -**features** - * [[`a9ad395`](http://github.com/node-modules/oss-client/commit/a9ad39539889f083e0d7671ca19ecc1b263eed74)] - 📦 NEW: Try to use ctx.httpclient first (#6) (fengmk2 <>) - -1.1.1 / 2022-12-04 -================== - -**fixes** - * [[`38cebaa`](http://github.com/node-modules/oss-client/commit/38cebaa9f5868d67530cc34ef3cdb4023b9d3a1f)] - 🐛 FIX: Should add oss-interface to dependencies (#5) (fengmk2 <>) - -**others** - * [[`301b0a2`](http://github.com/node-modules/oss-client/commit/301b0a2a3fa9af2ce85e093747f59c8f677dded1)] - 🤖 TEST: Test enable parallel (#4) (fengmk2 <>) - -1.1.0 / 2022-10-27 -================== - -**features** - * [[`79b6302`](http://github.com/node-modules/oss-client/commit/79b6302b77bfabfc2750a5c5d48b4059cb04ac78)] - 📦 NEW: Add d.ts and IObjectSimple Client define (#3) (fengmk2 <>) - -**others** - * [[`8d9e935`](http://github.com/node-modules/oss-client/commit/8d9e935ee530ebd9477e6334991465ff59a75b4b)] - 📖 DOC: Remove browser document content (fengmk2 <>) - -1.0.1 / 2022-10-23 -================== - -**fixes** - * [[`e7b229f`](http://github.com/node-modules/oss-client/commit/e7b229f839925ff7a8069834b73fe34789e5e00f)] - 🐛 FIX: ClusterClient use class style (fengmk2 <>) - -1.0.0 / 2022-10-23 -================== - -**features** - * 
[[`fe3e2c1`](http://github.com/node-modules/oss-client/commit/fe3e2c1a119ffd3b8a8c77ab6b38ee545c14fb59)] - 👌 IMPROVE: Remove unuse ts files (#2) (fengmk2 <>),fatal: No names found, cannot describe anything. - -**others** - diff --git a/README.md b/README.md index cb04cd6fe..f457077a0 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # oss-client [![NPM version][npm-image]][npm-url] +[![Node.js CI](https://github.com/node-modules/oss-client/actions/workflows/nodejs.yml/badge.svg)](https://github.com/node-modules/oss-client/actions/workflows/nodejs.yml) [![coverage][cov-image]][cov-url] [npm-image]: https://img.shields.io/npm/v/oss-client.svg?style=flat-square @@ -8,7 +9,7 @@ [cov-image]: http://codecov.io/github/node-modules/oss-client/coverage.svg?branch=master [cov-url]: http://codecov.io/github/node-modules/oss-client?branch=master -Aliyun OSS(Object Storage Service) Node.js Client. +Alibaba cloud OSS(Object Storage Service) Node.js Client. ## Install @@ -36,128 +37,46 @@ All operation use es7 async/await to implement. All api is async function. 
- [Node.js Usage](#nodejs-usage) - [Compatibility](#compatibility) - [Basic usage](#basic-usage) - - [STS setup](#sts-setup) - [Data Regions](#data-regions) - [Create Account](#create-account) - [Create A Bucket Instance](#create-a-bucket-instance) - - [oss(options)](#ossoptions) - - [Bucket Operations](#bucket-operations) - - [.listBuckets(query[, options])](#listbucketsquery-options) - - [.putBucket(name[, options])](#putbucketname-options) - - [.deleteBucket(name[, options])](#deletebucketname-options) - - [.useBucket(name)](#usebucketname) - - [.getBucketInfo(name)](#getbucketinfoname) - - [.getBucketStat(name)](#getbucketstatname) - - [.getBucketLocation(name)](#getbucketlocationname) - - [.putBucketACL(name, acl[, options])](#putbucketaclname-acl-options) - - [.getBucketACL(name[, options])](#getbucketaclname-options) - - [.putBucketLogging(name, prefix[, options])](#putbucketloggingname-prefix-options) - - [.getBucketLogging(name[, options])](#getbucketloggingname-options) - - [.deleteBucketLogging(name[, options])](#deletebucketloggingname-options) - - [.putBucketWebsite(name, config[, options])](#putbucketwebsitename-config-options) - - [.getBucketWebsite(name[, options])](#getbucketwebsitename-options) - - [.deleteBucketWebsite(name[, options])](#deletebucketwebsitename-options) - - [.putBucketReferer(name, allowEmpty, referers[, options])](#putbucketreferername-allowempty-referers-options) - - [.getBucketReferer(name[, options])](#getbucketreferername-options) - - [.deleteBucketReferer(name[, options])](#deletebucketreferername-options) - - [.putBucketLifecycle(name, rules[, options])](#putbucketlifecyclename-rules-options) - - [.getBucketLifecycle(name[, options])](#getbucketlifecyclename-options) - - [.deleteBucketLifecycle(name[, options])](#deletebucketlifecyclename-options) - - [.putBucketCORS(name, rules[, options])](#putbucketcorsname-rules-options) - - [.getBucketCORS(name[, options])](#getbucketcorsname-options) - - [.deleteBucketCORS(name[, 
options])](#deletebucketcorsname-options) - - [.getBucketRequestPayment(bucketName[, options])](#getbucketrequestpaymentbucketname-options) - - [.putBucketRequestPayment(bucketName, payer[, options])](#putbucketrequestpaymentbucketname-payer-options) - - [.putBucketEncryption(name, rules)](#putbucketencryptionname-rules) - - [.getBucketEncryption(name)](#getbucketencryptionname) - - [.deleteBucketEncryption(name)](#deletebucketencryptionname) - - [.putBucketTags(name, tag[, options])](#putbuckettagsname-tag-options) - - [.getBucketTags(name[, options])](#getbuckettagsname-options) - - [.deleteBucketTags(name[, options])](#deletebuckettagsname-options) - - [.putBucketPolicy(name, policy[, options])](#putbucketpolicyname-policy-options) - - [.getBucketPolicy(name[, options])](#getbucketpolicyname-options) - - [.deleteBucketPolicy(name[, options])](#deletebucketpolicyname-options) - - [.getBucketVersioning(name[, options])](#getbucketversioningname-options) - - [.putBucketVersioning(name, status[, options])](#putbucketversioningname-status-options) - - [.getBucketInventory(name, inventoryId[, options])](#getbucketinventoryname-inventoryid-options) - - [putBucketInventory(name, inventory[, options])](#putbucketinventoryname-inventory-options) - - [deleteBucketInventory(name, inventoryId[, options])](#deletebucketinventoryname-inventoryid-options) - - [listBucketInventory(name[, options])](#listbucketinventoryname-options) - - [.abortBucketWorm(name[, options])](#abortbucketwormname-options) - - [.completeBucketWorm(name, wormId[, options])](#completebucketwormname-wormid-options) - - [.extendBucketWorm(name, wormId, days[, options])](#extendbucketwormname-wormid-days-options) - - [.getBucketWorm(name[, options])](#getbucketwormname-options) - - [.initiateBucketWorm(name, days[, options])](#initiatebucketwormname-days-options) + - [new OSSObject(options)](#new-ossobjectoptions) - [Object Operations](#object-operations) - - [.put(name, file[, 
options])](#putname-file-options) - - [.putStream(name, stream[, options])](#putstreamname-stream-options) - - [.append(name, file[, options])](#appendname-file-options) - - [.getObjectUrl(name[, baseUrl])](#getobjecturlname-baseurl) - - [.generateObjectUrl(name[, baseUrl])](#generateobjecturlname-baseurl) - - [.head(name[, options])](#headname-options) - - [.getObjectMeta(name[, options])](#getobjectmetaname-options) - - [.get(name[, file, options])](#getname-file-options) - - [.getStream(name[, options])](#getstreamname-options) - - [.delete(name[, options])](#deletename-options) - - [.copy(name, sourceName[, sourceBucket, options])](#copyname-sourcename-sourcebucket-options) - - [.putMeta(name, meta[, options])](#putmetaname-meta-options) - - [.deleteMulti(names[, options])](#deletemultinames-options) - - [.list(query[, options])](#listquery-options) - - [.listV2(query[, options])](#listv2query-options) - - [.getBucketVersions(query[, options])](#getbucketversionsquery-options) - - [.signatureUrl(name[, options])](#signatureurlname-options) - - [.asyncSignatureUrl(name[, options])](#asyncsignatureurlname-options) - - [.putACL(name, acl[, options])](#putaclname-acl-options) - - [.getACL(name[, options])](#getaclname-options) - - [.restore(name[, options])](#restorename-options) - - [.putSymlink(name, targetName[, options])](#putsymlinkname-targetname-options) - - [.getSymlink(name[, options])](#getsymlinkname-options) - - [.initMultipartUpload(name[, options])](#initmultipartuploadname-options) - - [.uploadPart(name, uploadId, partNo, file, start, end[, options])](#uploadpartname-uploadid-partno-file-start-end-options) - - [.uploadPartCopy(name, uploadId, partNo, range, sourceData[, options])](#uploadpartcopyname-uploadid-partno-range-sourcedata-options) - - [.completeMultipartUpload(name, uploadId, parts[, options])](#completemultipartuploadname-uploadid-parts-options) - - [.multipartUpload(name, file[, options])](#multipartuploadname-file-options) - - 
[.multipartUploadCopy(name, sourceData[, options])](#multipartuploadcopyname-sourcedata-options) - - [.listParts(name, uploadId[, query, options])](#listpartsname-uploadid-query-options) - - [.listUploads(query[, options])](#listuploadsquery-options) - - [.abortMultipartUpload(name, uploadId[, options])](#abortmultipartuploadname-uploadid-options) + - [.put(name, file\[, options\])](#putname-file-options) + - [.putStream(name, stream\[, options\])](#putstreamname-stream-options) + - [.append(name, file\[, options\])](#appendname-file-options) + - [.generateObjectUrl(name\[, baseUrl\])](#generateobjecturlname-baseurl) + - [.head(name\[, options\])](#headname-options) + - [.getObjectMeta(name\[, options\])](#getobjectmetaname-options) + - [.get(name\[, file, options\])](#getname-file-options) + - [.getStream(name\[, options\])](#getstreamname-options) + - [.delete(name\[, options\])](#deletename-options) + - [.copy(name, sourceName\[, sourceBucket, options\])](#copyname-sourcename-sourcebucket-options) + - [.putMeta(name, meta\[, options\])](#putmetaname-meta-options) + - [.deleteMulti(names\[, options\])](#deletemultinames-options) + - [.list(query\[, options\])](#listquery-options) + - [.listV2(query\[, options\])](#listv2query-options) + - [.getBucketVersions(query\[, options\])](#getbucketversionsquery-options) + - [.signatureUrl(name\[, options\])](#signatureurlname-options) + - [.asyncSignatureUrl(name\[, options\])](#asyncsignatureurlname-options) + - [.putACL(name, acl\[, options\])](#putaclname-acl-options) + - [.getACL(name\[, options\])](#getaclname-options) + - [.restore(name\[, options\])](#restorename-options) + - [.putSymlink(name, targetName\[, options\])](#putsymlinkname-targetname-options) + - [.getSymlink(name\[, options\])](#getsymlinkname-options) - [.calculatePostSignature(policy)](#calculatepostsignaturepolicy) - - [.getObjectTagging(name[, options])](#getobjecttaggingname-options) - - [.putObjectTagging(name, tag[, 
options])](#putobjecttaggingname-tag-options) - - [.deleteObjectTagging(name[, options])](#deleteobjecttaggingname-options) - - [.processObjectSave(sourceObject, targetObject, process[, targetBucket])](#processobjectsavesourceobject-targetobject-process-targetbucket) - - [RTMP Operations](#rtmp-operations) - - [.putChannel(id, conf[, options])](#putchannelid-conf-options) - - [.getChannel(id[, options])](#getchannelid-options) - - [.deleteChannel(id[, options])](#deletechannelid-options) - - [.putChannelStatus(id, status[, options])](#putchannelstatusid-status-options) - - [.getChannelStatus(id[, options])](#getchannelstatusid-options) - - [.listChannels(query[, options])](#listchannelsquery-options) - - [.getChannelHistory(id[, options])](#getchannelhistoryid-options) - - [.createVod(id, name, time[, options])](#createvodid-name-time-options) - - [.getRtmpUrl(channelId[, options])](#getrtmpurlchannelid-options) - - [Create A Image Service Instance](#create-a-image-service-instance) - - [oss.ImageClient(options)](#ossimageclientoptions) - - [Image Operations](#image-operations) - - [imgClient.get(name, file[, options])](#imgclientgetname-file-options) - - [imgClient.getStream(name[, options])](#imgclientgetstreamname-options) - - [imgClient.getExif(name[, options])](#imgclientgetexifname-options) - - [imgClient.getInfo(name[, options])](#imgclientgetinfoname-options) - - [imgClient.putStyle(name, style[, options])](#imgclientputstylename-style-options) - - [imgClient.getStyle(name[, options])](#imgclientgetstylename-options) - - [imgClient.listStyle([options])](#imgclientliststyleoptions) - - [imgClient.deleteStyle(name[, options])](#imgclientdeletestylename-options) - - [imgClient.signatureUrl(name)](#imgclientsignatureurlname) - - [Cluster Mode](#cluster-mode) - - [Get Methods](#get-methods) - - [Put Methods](#put-methods) + - [.getObjectTagging(name\[, options\])](#getobjecttaggingname-options) + - [.putObjectTagging(name, tag\[, 
options\])](#putobjecttaggingname-tag-options) + - [.deleteObjectTagging(name\[, options\])](#deleteobjecttaggingname-options) + - [.processObjectSave(sourceObject, targetObject, process\[, targetBucket\])](#processobjectsavesourceobject-targetobject-process-targetbucket) - [Known Errors](#known-errors) + - [Contributors](#contributors) ## Node.js Usage ### Compatibility -- Node.js >= 14.0.0 +- Node.js >= 16.0.0 - urllib >= 3.0.0 ### Basic usage @@ -170,21 +89,32 @@ npm install oss-client 2. for example: +Commonjs + ```js -const OSSClient = require('oss-client'); -const store = new OSSClient({ +const { OSSObject } = require('oss-client'); +const ossObject = new OSSObject({ region: '', + endpoint: '', accessKeyId: '', accessKeySecret: '', bucket: '' }); ``` -### STS setup +TypeScript and ESM + +```ts +import { OSSObject } from 'oss-client'; -As we don't want to expose the accessKeyId/accessKeySecret in the -browser, a [common practice][oss-sts] is to use STS to grant temporary -access. +const ossObject = new OSSObject({ + region: '', + endpoint: '', + accessKeyId: '', + accessKeySecret: '', + bucket: '' +}); +``` ## Data Regions @@ -200,7 +130,7 @@ After account created, you can create the OSS instance and get the `accessKeyId` Each OSS instance required `accessKeyId`, `accessKeySecret` and `bucket`. -## oss(options) +## new OSSObject(options) Create a Bucket store instance. @@ -208,33 +138,24 @@ options: - accessKeyId {String} access key you create on aliyun console website - accessKeySecret {String} access secret you create -- [stsToken] {String} used by temporary authorization, detail [see](https://www.alibabacloud.com/help/doc-detail/32077.htm) -- [refreshSTSToken] {Function} used by auto set `stsToken`、`accessKeyId`、`accessKeySecret` when sts info expires. 
return value must be object contains `stsToken`、`accessKeyId`、`accessKeySecret` -- [refreshSTSTokenInterval] {number} use time (ms) of refresh STSToken interval it should be - less than sts info expire interval, default is 300000ms(5min) - when sts info expires. return value must be object contains `stsToken`、`accessKeyId`、`accessKeySecret` - [bucket] {String} the default bucket you want to access - If you don't have any bucket, please use `putBucket()` create one first. - [endpoint] {String} oss region domain. It takes priority over `region`. Set as extranet domain name, intranet domain name, accelerated domain name, etc. according to different needs. please see [endpoints](https://www.alibabacloud.com/help/doc-detail/31837.htm) - [region] {String} the bucket data region location, please see [Data Regions](#data-regions), default is `oss-cn-hangzhou`. - [internal] {Boolean} access OSS with aliyun internal network or not, default is `false`. If your servers are running on aliyun too, you can set `true` to save lot of money. -- [secure] {Boolean} instruct OSS client to use HTTPS (secure: true) or HTTP (secure: false) protocol. - [timeout] {String|Number} instance level timeout for all operations, default is `60s`. -- [cname] {Boolean}, default false, access oss with custom domain name. if true, you can fill `endpoint` field with your custom domain name, - [isRequestPay] {Boolean}, default false, whether request payer function of the bucket is open, if true, will send headers `'x-oss-request-payer': 'requester'` to oss server. the details you can see [requestPay](https://help.aliyun.com/document_detail/91337.htm) -- [retryMax] {Number}, used by auto retry send request count when request error is net error or timeout. **_NOTE:_** Not support `put` with stream, `putStream`, `append` with stream because the stream can only be consumed once example: 1. 
basic usage ```js -const { Client } = require('oss-client'); +const { OSSObject } = require('oss-client'); -const store = new Client({ +const store = new OSSObject({ accessKeyId: 'your access key', accessKeySecret: 'your access secret', bucket: 'your bucket name', @@ -248,9 +169,9 @@ const store = new Client({ - Accelerate endpoint of regions outside mainland China: oss-accelerate-overseas.aliyuncs.com ```js -const { Client } = require('oss-client'); +const { OSSObject } = require('oss-client'); -const store = new Client({ +const store = new OSSObject({ accessKeyId: 'your access key', accessKeySecret: 'your access secret', bucket: 'your bucket name', @@ -261,9 +182,9 @@ const store = new Client({ 3. use custom domain ```js -const { Client } = require('oss-client'); +const { OSSObject } = require('oss-client'); -const store = new Client({ +const store = new OSSObject({ accessKeyId: 'your access key', accessKeySecret: 'your access secret', cname: true, @@ -271,67 +192,48 @@ const store = new Client({ }); ``` -4. use STS and refreshSTSToken - -```js -const { Client } = require('oss-client'); - -const store = new Client({ - accessKeyId: 'your STS key', - accessKeySecret: 'your STS secret', - stsToken: 'your STS token', - refreshSTSToken: async () => { - const info = await fetch('you sts server'); - return { - accessKeyId: info.accessKeyId, - accessKeySecret: info.accessKeySecret, - stsToken: info.stsToken - } - }, - refreshSTSTokenInterval: 300000 -}); -``` - -5. retry request with stream - -```js -for (let i = 0; i <= store.options.retryMax; i++) { - try { - const result = await store.putStream("", fs.createReadStream("")); - console.log(result); - break; // break if success - } catch (e) { - console.log(e); - } -} -``` +## Object Operations -## Bucket Operations +All operations function return Promise, except `signatureUrl`. -### .listBuckets(query[, options]) +### .put(name, file[, options]) -List buckets in this account. +Add an object to the bucket. 
parameters: -- [query] {Object} query parameters, default is `null` - - [prefix] {String} search buckets using `prefix` key - - [marker] {String} search start from `marker`, including `marker` key - - [max-keys] {String|Number} max buckets, default is `100`, limit to `1000` +- name {String} object name store on OSS +- file {String|Buffer|ReadStream} object local path, content buffer or ReadStream content instance use in Node, Blob and html5 File - [options] {Object} optional parameters - [timeout] {Number} the operation timeout + - [mime] {String} custom mime, will send with `Content-Type` entity header + - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string + e.g.: `{ uid: 123, pid: 110 }` + - [callback] {Object} The callback parameter is composed of a JSON string encoded in Base64,detail [see](https://www.alibabacloud.com/help/doc-detail/31989.htm)
+ - url {String} After a file is uploaded successfully, the OSS sends a callback request to this URL. + - [host] {String} The host header value for initiating callback requests. + - body {String} The value of the request body when a callback is initiated, for example, `key=${key}&etag=${etag}&my_var=${x:my_var}`. + - [contentType] {String} The Content-Type of the callback requests initiated. It supports application/x-www-form-urlencoded and application/json, and the former is the default value. + - [customValue] {Object} Custom parameters are a map of key-values
+ e.g.: -Success will return buckets list on `buckets` properties. + ```js + var customValue = {var1: 'value1', var2: 'value2'} + ``` -- buckets {Array} bucket meta info list - Each `BucketMeta` will contains blow properties: - - name {String} bucket name - - region {String} bucket store data region, e.g.: `oss-cn-hangzhou-a` - - creationDate {String} bucket create GMT date, e.g.: `2015-02-19T08:39:44.000Z` - - storageClass {String} e.g.: `Standard`, `IA`, `Archive` -- owner {Object} object owner, including `id` and `displayName` -- isTruncated {Boolean} truncate or not -- nextMarker {String} next marker string + - [headers] {Object} extra headers + - 'Cache-Control' cache control for download, e.g.: `Cache-Control: public, no-cache` + - 'Content-Disposition' object name for download, e.g.: `Content-Disposition: somename` + - 'Content-Encoding' object content encoding for download, e.g.: `Content-Encoding: gzip` + - 'Expires' expires time for download, an absolute date and time. e.g.: `Tue, 08 Dec 2020 13:49:43 GMT` + - See more: [PutObject](https://help.aliyun.com/document_detail/31978.html#title-yxe-96d-x61) + +Success will return the object information. + +object: + +- name {String} object name +- data {Object} callback server response data, sdk use JSON.parse() return - res {Object} response info, including - status {Number} response status - headers {Object} response headers @@ -340,34 +242,122 @@ Success will return buckets list on `buckets` properties. 
example: -- List top 10 buckets +- Add an object through local file path + +```js +const filepath = '/home/ossdemo/demo.txt'; +store.put('ossdemo/demo.txt', filepath).then((result) => { + console.log(result); +}); + +{ + name: 'ossdemo/demo.txt', + res: { + status: 200, + headers: { + date: 'Tue, 17 Feb 2015 13:28:17 GMT', + 'content-length': '0', + connection: 'close', + etag: '"BF7A03DA01440845BC5D487B369BC168"', + server: 'AliyunOSS', + 'x-oss-request-id': '54E341F1707AA0275E829244' + }, + size: 0, + rt: 92 + } +} +``` + +- Add an object through content buffer + +```js +store.put('ossdemo/buffer', Buffer.from('foo content')).then((result) => { + console.log(result); +}); + +{ + name: 'ossdemo/buffer', + url: 'http://demo.oss-cn-hangzhou.aliyuncs.com/ossdemo/buffer', + res: { + status: 200, + headers: { + date: 'Tue, 17 Feb 2015 13:28:17 GMT', + 'content-length': '0', + connection: 'close', + etag: '"xxx"', + server: 'AliyunOSS', + 'x-oss-request-id': '54E341F1707AA0275E829243' + }, + size: 0, + rt: 92 + } +} +``` + +- Add an object through readstream ```js -store.listBuckets({ - "max-keys": 10 -}).then((result) => { +const filepath = '/home/ossdemo/demo.txt'; +store.put('ossdemo/readstream.txt', fs.createReadStream(filepath)).then((result) => { console.log(result); }); + +{ + name: 'ossdemo/readstream.txt', + url: 'http://demo.oss-cn-hangzhou.aliyuncs.com/ossdemo/readstream.txt', + res: { + status: 200, + headers: { + date: 'Tue, 17 Feb 2015 13:28:17 GMT', + 'content-length': '0', + connection: 'close', + etag: '"BF7A03DA01440845BC5D487B369BC168"', + server: 'AliyunOSS', + 'x-oss-request-id': '54E341F1707AA0275E829242' + }, + size: 0, + rt: 92 + } +} ``` -### .putBucket(name[, options]) +### .putStream(name, stream[, options]) -Create a new bucket. +Add a stream object to the bucket. parameters: -- name {String} bucket name - If bucket exists and not belong to current account, will throw BucketAlreadyExistsError. 
- If bucket not exists, will create a new bucket and set it's ACL. +- name {String} object name store on OSS +- stream {ReadStream} object ReadStream content instance - [options] {Object} optional parameters - - [acl] {String} include `private`,`public-read`,`public-read-write` - - [storageClass] {String} the storage type include (Standard,IA,Archive) - - [dataRedundancyType] {String} default `LRS`, include `LRS`,`ZRS` + - [contentLength] {Number} the stream length, `chunked encoding` will be used if absent - [timeout] {Number} the operation timeout + - [mime] {String} custom mime, will send with `Content-Type` entity header + - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string + e.g.: `{ uid: 123, pid: 110 }` + - [callback] {Object} The callback parameter is composed of a JSON string encoded in Base64,detail [see](https://www.alibabacloud.com/help/doc-detail/31989.htm)
+ - url {String} After a file is uploaded successfully, the OSS sends a callback request to this URL. + - [host] {String} The host header value for initiating callback requests. + - body {String} The value of the request body when a callback is initiated, for example, `key=${key}&etag=${etag}&my_var=${x:my_var}`. + - [contentType] {String} The Content-Type of the callback requests initiated. It supports application/x-www-form-urlencoded and application/json, and the former is the default value. + - [customValue] {Object} Custom parameters are a map of key-values
+ e.g.: + + ```js + var customValue = {var1: 'value1', var2: 'value2'} + ``` + + - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) + - 'Cache-Control' cache control for download, e.g.: `Cache-Control: public, no-cache` + - 'Content-Disposition' object name for download, e.g.: `Content-Disposition: somename` + - 'Content-Encoding' object content encoding for download, e.g.: `Content-Encoding: gzip` + - 'Expires' expires time for download, an absolute date and time. e.g.: `Tue, 08 Dec 2020 13:49:43 GMT` + +Success will return the object information. -Success will return the bucket name on `bucket` properties. +object: -- bucket {String} bucket name +- name {String} object name - res {Object} response info, including - status {Number} response status - headers {Object} response headers @@ -376,281 +366,332 @@ Success will return the bucket name on `bucket` properties. example: -- Create a bucket name `helloworld` location on HongKong +- Add an object through readstream ```js -store.putBucket('helloworld').then((result) => { - // use it by default - store.useBucket('helloworld'); +const filepath = '/home/ossdemo/demo.txt'; +store.putStream('ossdemo/readstream.txt', fs.createReadStream(filepath)).then((result) => { + console.log(result); }); -``` -- Create a bucket name `helloworld` location on HongKong StorageClass `Archive` - -```js -await store.putBucket('helloworld', { StorageClass: 'Archive' }); -// use it by default -store.useBucket('helloworld'); +{ + name: 'ossdemo/readstream.txt', + url: 'http://demo.oss-cn-hangzhou.aliyuncs.com/ossdemo/readstream.txt', + res: { + status: 200, + headers: { + date: 'Tue, 17 Feb 2015 13:28:17 GMT', + 'content-length': '0', + connection: 'close', + etag: '"BF7A03DA01440845BC5D487B369BC168"', + server: 'AliyunOSS', + 'x-oss-request-id': '54E341F1707AA0275E829242' + }, + size: 0, + rt: 92 + } +} ``` -### .deleteBucket(name[, options]) +### .append(name, file[, options]) 
-Delete an empty bucket. +Append an object to the bucket, it's almost same as put, but it can add content to existing object rather than override it. -parameters: +All parameters are same as put except for options.position -- name {String} bucket name - If bucket is not empty, will throw BucketNotEmptyError. - If bucket is not exists, will throw NoSuchBucketError. +- name {String} object name store on OSS +- file {String|Buffer|ReadStream} object local path, content buffer or ReadStream content instance - [options] {Object} optional parameters + - [position] {String} specify the position which is the content length of the latest object - [timeout] {Number} the operation timeout + - [mime] {String} custom mime, will send with `Content-Type` entity header + - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string + e.g.: `{ uid: 123, pid: 110 }` + - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) + - 'Cache-Control' cache control for download, e.g.: `Cache-Control: public, no-cache` + - 'Content-Disposition' object name for download, e.g.: `Content-Disposition: somename` + - 'Content-Encoding' object content encoding for download, e.g.: `Content-Encoding: gzip` + - 'Expires' expires time for download, an absolute date and time. e.g.: `Tue, 08 Dec 2020 13:49:43 GMT` -Success will return: +object: +- name {String} object name +- url {String} the url of oss - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt {Number} request total use time (ms) +- nextAppendPosition {String} the next position example: -- Delete the exists 'helloworld' bucket on 'oss-cn-hongkong' - ```js -store.deleteBucket('helloworld').then((result) => {}); -``` - -### .useBucket(name) - -Use the bucket. 
- -parameters: - -- name {String} bucket name - -example: - -- Use `helloworld` as the default bucket +let object = await store.append('ossdemo/buffer', Buffer.from('foo')); -```js -store.useBucket('helloworld'); +// append content to the existing object +object = await store.append('ossdemo/buffer', Buffer.from('bar'), { + position: object.nextAppendPosition, +}); ``` -### .getBucketInfo(name) - -Get bucket information,include CreationDate、ExtranetEndpoint、IntranetEndpoint、Location、Name、StorageClass、 -Owner、AccessControlList、Versioning - -parameters: - -- name {String} bucket name +### .generateObjectUrl(name[, baseUrl]) -example: +Get the Object url. +If provide `baseUrl`, will use `baseUrl` instead the default `bucket and endpoint`. +Suggest use generateObjectUrl instead of getObjectUrl. -- Use `helloworld` as the default bucket +e.g.: ```js -store.getBucketInfo('helloworld').then( (res) => { - console.log(res.bucket) -}) -``` +const url = store.generateObjectUrl('foo/bar.jpg'); +// cdnUrl should be `https://${bucketname}.${endpoint}foo/bar.jpg` -### .getBucketStat(name) +const cdnUrl = store.generateObjectUrl('foo/bar.jpg', 'https://mycdn.domain.com'); +// cdnUrl should be `https://mycdn.domain.com/foo/bar.jpg` +``` -Call the GetBucketStat interface to get the storage capacity of the specified storage space (Bucket) and the number of files (Object). +### .head(name[, options]) -Calling this interface requires the oss:GetBucketStat permission. -The data obtained by calling this interface is not real-time data and may be delayed for more than an hour. -The point in time of the stored information obtained by calling this interface is not guaranteed to be up-to-date, i.e. the LastModifiedTime field returned by a later call to this interface may be smaller than the LastModifiedTime field returned by a previous call to this interface. +Head an object and get the meta info.
parameters: -- name {String} bucket name +- name {String} object name store on OSS +- [options] {Object} optional parameters + - [timeout] {Number} the operation timeout + - [versionId] {String} the version id of history object + - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) + - 'If-Modified-Since' object modified after this time will return 200 and object meta, + otherwise return 304 not modified + - 'If-Unmodified-Since' object modified before this time will return 200 and object meta, + otherwise throw PreconditionFailedError + - 'If-Match' object etag equal this will return 200 and object meta, + otherwise throw PreconditionFailedError + - 'If-None-Match' object etag not equal this will return 200 and object meta, + otherwise return 304 not modified -Success will return: +Success will return the object's meta information. -- stat {Object} container for the BucketStat structure: - - Storage {String} the total storage capacity of the Bucket, in bytes. - - ObjectCount {String} total number of Objects in the Bucket。 - - MultipartUploadCount {String} the number of Multipart Uploads in the Bucket that have been initialized but not yet completed (Complete) or not yet aborted (Abort). - - LiveChannelCount {String} the number of Live Channels in the Bucket. - - LastModifiedTime {String} the point in time, in timestamps, when the storage information was retrieved. - - StandardStorage {String} the amount of storage of the standard storage type, in bytes. - - StandardObjectCount {String} the number of objects of the standard storage type. - - InfrequentAccessStorage {String} the amount of billed storage for the low-frequency storage type, in bytes. - - InfrequentAccessRealStorage {String} the actual storage amount of the low-frequency storage type, in bytes. - - InfrequentAccessObjectCount {String} the number of Objects of the low-frequency storage type. 
- - ArchiveStorage {String} the amount of billed storage for the archive storage type, in bytes. - - ArchiveRealStorage {String} the actual storage amount of the archive storage type, in bytes. - - ArchiveObjectCount {String} the number of objects of the archive storage type. - - ColdArchiveStorage {String} the amount of billed storage for the cold archive storage type, in bytes. - - ColdArchiveRealStorage {String} the actual storage amount in bytes for the cold archive storage type. - - ColdArchiveObjectCount {String} the number of objects of the cold archive storage type. +object: +- status {Number} response status, maybe 200 or 304 +- meta {Object} object user meta, if not set on `put()`, will return null. + If return status 304, meta will be null too - res {Object} response info, including - status {Number} response status - headers {Object} response headers + - [x-oss-version-id] return in multiversion - size {Number} response size - rt {Number} request total use time (ms) example: -- If you don't fill in the name, the default is the bucket defined during initialization. +- Head an exists object and get user meta ```js -store.getBucketStat().then(res=>console.log(res)) -``` - -### .getBucketLocation(name) - -Get bucket location - -parameters: - -- name {String} bucket name +await this.store.put('ossdemo/head-meta', Buffer.from('foo'), { + meta: { + uid: 1, + path: 'foo/demo.txt' + } +}); +const object = await this.store.head('ossdemo/head-meta'); +console.log(object); -example: +{ + status: 200, + meta: { + uid: '1', + path: 'foo/demo.txt' + }, + res: { ... } +} +``` -- Use `helloworld` as the default bucket +- Head a not exists object ```js -store.getBucketLocation('helloworld').then( (res) => { - console.log(res.location) -}) +const object = await this.store.head('ossdemo/head-meta'); +// will throw NoSuchKeyError ``` ---- - -### .putBucketACL(name, acl[, options]) +### .getObjectMeta(name[, options]) -Update the bucket ACL. 
+Get an object meta info include ETag、Size、LastModified and so on, not return object content. parameters: -- name {String} bucket name -- acl {String} access control list, current available: `public-read-write`, `public-read` and `private` +- name {String} object name store on OSS - [options] {Object} optional parameters - [timeout] {Number} the operation timeout + - [versionId] {String} the version id of history object -Success will return: +Success will return the object's meta information. +object: + +- status {Number} response status - res {Object} response info, including - - status {Number} response status - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) example: -- Set bucket `helloworld` to `public-read-write` +- Head an exists object and get object meta info ```js -store.putBucketACL('helloworld', 'public-read-write').then((result) => { -}); +await this.store.put('ossdemo/object-meta', Buffer.from('foo')); +const object = await this.store.getObjectMeta('ossdemo/object-meta'); +console.log(object); + +{ + status: 200, + res: { ... } +} ``` -### .getBucketACL(name[, options]) +### .get(name[, file, options]) -Get the bucket ACL. +Get an object from the bucket. parameters: -- name {String} bucket name +- name {String} object name store on OSS +- [file] {String|WriteStream} file path or WriteStream instance to store the content + If `file` is null or ignore this parameter, function will return info contains `content` property. 
- [options] {Object} optional parameters + - [versionId] {String} the version id of history object - [timeout] {Number} the operation timeout + - [process] {String} image process params, will send with `x-oss-process` + e.g.: `{process: 'image/resize,w_200'}` + - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) + - 'Range' get specifying range bytes content, e.g.: `Range: bytes=0-9` + - 'If-Modified-Since' object modified after this time will return 200 and object meta, + otherwise return 304 not modified + - 'If-Unmodified-Since' object modified before this time will return 200 and object meta, + otherwise throw PreconditionFailedError + - 'If-Match' object etag equal this will return 200 and object meta, + otherwise throw PreconditionFailedError + - 'If-None-Match' object etag not equal this will return 200 and object meta, + otherwise return 304 not modified -Success will return: +Success will return the info contains response. -- acl {String} acl settiongs string +object: + +- [content] {Buffer} file content buffer if `file` parameter is null or ignore - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt {Number} request total use time (ms) +If object not exists, will throw NoSuchKeyError. + example: -- Get bucket `helloworld` +- Get an exists object and store it to the local file ```js -store.getBucketACL('helloworld').then((result) => { - console.log(result.acl); -}); +const filepath = '/home/ossdemo/demo.txt'; +await store.get('ossdemo/demo.txt', filepath); ``` ---- +_ Store object to a writestream -### .putBucketLogging(name, prefix[, options]) +```js +await store.get('ossdemo/demo.txt', somestream); +``` -Update the bucket logging settings. -Log file will create every one hour and name format: `-YYYY-mm-DD-HH-MM-SS-UniqueString`. 
+- Get an object content buffer -parameters: +```js +const result = await store.get('ossdemo/demo.txt'); +console.log(Buffer.isBuffer(result.content)); +``` -- name {String} bucket name -- [prefix] {String} prefix path name to store the log files -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout +- Get a processed image and store it to the local file -Success will return: +```js +const filepath = '/home/ossdemo/demo.png'; +await store.get('ossdemo/demo.png', filepath, {process: 'image/resize,w_200'}); +``` -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) +- Get a not exists object -example: +```js +const filepath = '/home/ossdemo/demo.txt'; +await store.get('ossdemo/not-exists-demo.txt', filepath); +// will throw NoSuchKeyError +``` -- Enable bucket `helloworld` logging and save with prefix `logs/` +- Get a historic version object ```js -store.putBucketLogging('helloworld', 'logs/').then((result) => { +const filepath = '/home/ossdemo/demo.txt'; +const versionId = 'versionId string'; +await store.get('ossdemo/not-exists-demo.txt', filepath, { + versionId }); ``` -### .getBucketLogging(name[, options]) +### .getStream(name[, options]) -Get the bucket logging settings. +Get an object read stream. 
parameters: -- name {String} bucket name +- name {String} object name store on OSS - [options] {Object} optional parameters - [timeout] {Number} the operation timeout + - [process] {String} image process params, will send with `x-oss-process` + - [headers] {Object} extra headers + - 'If-Modified-Since' object modified after this time will return 200 and object meta, + otherwise return 304 not modified + - 'If-Unmodified-Since' object modified before this time will return 200 and object meta, + otherwise throw PreconditionFailedError + - 'If-Match' object etag equal this will return 200 and object meta, + otherwise throw PreconditionFailedError + - 'If-None-Match' object etag not equal this will return 200 and object meta, + otherwise return 304 not modified -Success will return: +Success will return the stream instance and response info. -- enable {Boolean} enable logging or not -- prefix {String} prefix path name to store the log files, maybe `null` +object: + +- stream {ReadStream} readable stream instance + if response status is not 200, stream will be `null`. - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt {Number} request total use time (ms) +If object not exists, will throw NoSuchKeyError. + example: -- Get bucket `helloworld` logging settings +- Get an exists object stream ```js -store.getBucketLogging('helloworld').then((result) => { - console.log(result.enable, result.prefix); -}); +const result = await store.getStream('ossdemo/demo.txt'); +result.stream.pipe(fs.createWriteStream('some file.txt')); ``` -### .deleteBucketLogging(name[, options]) +### .delete(name[, options]) -Delete the bucket logging settings. +Delete an object from the bucket. 
parameters: -- name {String} bucket name +- name {String} object name store on OSS - [options] {Object} optional parameters - [timeout] {Number} the operation timeout + - [versionId] {String} the version id of history object -Success will return: +Success will return the info contains response. + +object: - res {Object} response info, including - status {Number} response status @@ -658,202 +699,163 @@ Success will return: - size {Number} response size - rt {Number} request total use time (ms) ---- - -### .putBucketWebsite(name, config[, options]) +If delete object not exists, will also delete success. -Set the bucket as a static website. +example: -parameters: +- Delete an exists object -- name {String} bucket name -- config {Object} website config, contains blow properties: - - index {String} default page, e.g.: `index.html` - - [error] {String} error page, e.g.: 'error.html' - - [supportSubDir] {String} default vaule false - - [type] {String} default value 0 - - [routingRules] {Array} RoutingRules -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout +```js +await store.delete('ossdemo/someobject'); +``` -Success will return: +- Delete a not exists object -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) +```js +await store.delete('ossdemo/some-not-exists-object'); +``` -example: +- Delete a history object or deleteMarker ```js -store.putBucketWebsite('hello', { - index: 'index.html' -}).then((result) => { -}); +const versionId = 'versionId'; +await store.delete('ossdemo/some-not-exists-object', { versionId }); ``` -### .getBucketWebsite(name[, options]) +### .copy(name, sourceName[, sourceBucket, options]) -Get the bucket website config. +Copy an object from `sourceName` to `name`. 
parameters: -- name {String} bucket name +- name {String} object name store on OSS +- sourceName {String} source object name +- [sourceBucket] {String} source Bucket. if doesn't exist,`sourceBucket` is same bucket. - [options] {Object} optional parameters + - [versionId] {String} the version id of history object - [timeout] {Number} the operation timeout + - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string + e.g.: `{ uid: 123, pid: 110 }` + If the `meta` set, will override the source object meta. + - [headers] {Object} extra headers + - 'If-Match' do copy if source object etag equal this, + otherwise throw PreconditionFailedError + - 'If-None-Match' do copy if source object etag not equal this, + otherwise throw PreconditionFailedError + - 'If-Modified-Since' do copy if source object modified after this time, + otherwise throw PreconditionFailedError + - 'If-Unmodified-Since' do copy if source object modified before this time, + otherwise throw PreconditionFailedError + - See more: [CopyObject](https://help.aliyun.com/document_detail/31979.html?#title-tzy-vxc-ncx) -Success will return: +Success will return the copy result in `data` property. -- index {String} index page -- error {String} error page, maybe `null` -- supportSubDir {String} -- type {String} -- routingRules {Array} +object: + +- data {Object} copy result + - lastModified {String} object last modified GMT string + - etag {String} object etag contains `"`, e.g.: `"5B3C1A2E053D763E1B002CC607C5A0FE"` - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt {Number} request total use time (ms) -### .deleteBucketWebsite(name[, options]) - -Delete the bucket website config. +If source object not exists, will throw NoSuchKeyError. 
-parameters: +example: -- name {String} bucket name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout +- Copy same bucket object -Success will return: +```js +store.copy('newName', 'oldName').then((result) => { + console.log(result); +}); +``` -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) +- Copy other bucket object ---- +```js +store.copy('logo.png', 'logo.png', 'other-bucket').then((result) => { + console.log(result); +}); +``` -### .putBucketReferer(name, allowEmpty, referers[, options]) +- Copy historic object -Set the bucket request `Referer` white list. +```js +const versionId = 'your verisonId' +store.copy('logo.png', 'logo.png', 'other-bucket', { versionId }).then((result) => { + console.log(result); +}); +``` -parameters: +### .putMeta(name, meta[, options]) -- name {String} bucket name -- allowEmpty {Boolean} allow empty request referer or not -- referers {Array} `Referer` white list, e.g.: +Set an exists object meta. - ```js - [ - 'https://npm.taobao.org', - 'http://cnpmjs.org' - ] - ``` +parameters: +- name {String} object name store on OSS +- meta {Object} user meta, will send with `x-oss-meta-` prefix string + e.g.: `{ uid: 123, pid: 110 }` + If `meta: null`, will clean up the exists meta - [options] {Object} optional parameters - [timeout] {Number} the operation timeout -Success will return: +Success will return the putMeta result in `data` property. 
+- data {Object} copy result + - lastModified {String} object last modified GMT date, e.g.: `2015-02-19T08:39:44.000Z` + - etag {String} object etag contains `"`, e.g.: `"5B3C1A2E053D763E1B002CC607C5A0FE"` - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt {Number} request total use time (ms) +If object not exists, will throw NoSuchKeyError. + example: +- Update exists object meta + ```js -store.putBucketReferer('hello', false, [ - 'https://npm.taobao.org', - 'http://cnpmjs.org' -]).then((result) => { +const result = await store.putMeta('ossdemo.txt', { + uid: 1, pid: 'p123' }); +console.log(result); ``` -### .getBucketReferer(name[, options]) +- Clean up object meta -Get the bucket request `Referer` white list. +```js +await store.putMeta('ossdemo.txt', null); +``` -parameters: - -- name {String} bucket name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return: - -- allowEmpty {Boolean} allow empty request referer or not -- referers {Array} `Referer` white list -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -### .deleteBucketReferer(name[, options]) - -Delete the bucket request `Referer` white list. - -parameters: - -- name {String} bucket name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - ---- - -### .putBucketLifecycle(name, rules[, options]) +### .deleteMulti(names[, options]) -Set the bucket object lifecycle. +Delete multi objects in one request. 
parameters: -- name {String} bucket name -- rules {Array} rule config list, each `Rule` will contains blow properties: - - [id] {String} rule id, if not set, OSS will auto create it with random string. - - prefix {String} store prefix - - status {String} rule status, allow values: `Enabled` or `Disabled` - - [expiration] {Object} specifies the expiration attribute of the lifecycle rules for the object. - - [days] {Number|String} expire after the `days` - - [createdBeforeDate] {String} expire date, e.g.: `2022-10-11T00:00:00.000Z` - - [expiredObjectDeleteMarker] {String} value `true` - `createdBeforeDate` and `days` and `expiredObjectDeleteMarker` must have one. - - [abortMultipartUpload] {Object} Specifies the expiration attribute of the multipart upload tasks that are not complete. - - [days] {Number|String} expire after the `days` - - [createdBeforeDate] {String} expire date, e.g.: `2022-10-11T00:00:00.000Z` - `createdBeforeDate` and `days` must have one. - - [transition] {Object} Specifies the time when an object is converted to the IA or archive storage class during a valid life cycle. - - storageClass {String} Specifies the storage class that objects that conform to the rule are converted into. allow values: `IA` or `Archive` - - [days] {Number|String} expire after the `days` - - [createdBeforeDate] {String} expire date, e.g.: `2022-10-11T00:00:00.000Z` - `createdBeforeDate` and `days` must have one. - - [noncurrentVersionTransition] {Object} Specifies the time when an object is converted to the IA or archive storage class during a valid life cycle. - - storageClass {String} Specifies the storage class that history objects that conform to the rule are converted into. allow values: `IA` or `Archive` - - noncurrentDays {String} expire after the `noncurrentDays` - `expiration`、 `abortMultipartUpload`、 `transition`、 `noncurrentVersionTransition` must have one. 
- - [noncurrentVersionExpiration] {Object} specifies the expiration attribute of the lifecycle rules for the history object. - - noncurrentDays {String} expire after the `noncurrentDays` - - [tag] {Object} Specifies the object tag applicable to a rule. Multiple tags are supported. - - key {String} Indicates the tag key. - - value {String} Indicates the tag value. - `tag` cannot be used with `abortMultipartUpload` +- names {Array} object names, max 1000 objects in once. + - key {String} object name + - [versionId] {String} the version id of history object or deleteMarker - [options] {Object} optional parameters + - [quiet] {Boolean} quiet mode or verbose mode, default is `false`, verbose mode + quiet mode: if all objects delete succes, return emtpy response. + otherwise return delete error object results. + verbose mode: return all object delete results. - [timeout] {Number} the operation timeout -Success will return: +Success will return delete success objects in `deleted` property. +- [deleted] {Array} deleted object or deleteMarker info list + - [Key] {String} object name + - [VersionId] {String} object versionId + - [DeleteMarker] {String} generate or delete marker + - [DeleteMarkerVersionId] {String} marker versionId - res {Object} response info, including - status {Number} response status - headers {Object} response headers @@ -862,3546 +864,707 @@ Success will return: example: -```js -store.putBucketLifecycle('hello', [ - { - id: 'delete after one day', - prefix: 'logs/', - status: 'Enabled', - days: 1 - }, - { - prefix: 'logs2/', - status: 'Disabled', - date: '2022-10-11T00:00:00.000Z' - } -]).then((result) => {}); -``` - -example: for history with noncurrentVersionExpiration +- Delete multi objects in quiet mode ```js - const result = await store.putBucketLifecycle(bucket, [{ - id: 'expiration1', - prefix: 'logs/', - status: 'Enabled', - expiration: { - days: '1' - }, - noncurrentVersionExpiration: { - noncurrentDays: '1' - } -}]); -console.log(result) 
+const result = await store.deleteMulti(['obj1', 'obj2', 'obj3'], { + quiet: true +}); ``` -example: for history with expiredObjectDeleteMarker +- Delete multi objects in verbose mode ```js - const result = await store.putBucketLifecycle(bucket, [{ - id: 'expiration1', - prefix: 'logs/', - status: 'Enabled', - expiration: { - expiredObjectDeleteMarker: 'true' - }, - noncurrentVersionExpiration: { - noncurrentDays: '1' - } -}]); -console.log(result) +const result = await store.deleteMulti(['obj1', 'obj2', 'obj3']); ``` -example: for history with noncurrentVersionTransition +- Delete multi objects in multiversion ```js - const result = await store.putBucketLifecycle(bucket, [{ - id: 'expiration1', - prefix: 'logs/', - status: 'Enabled', - noncurrentVersionTransition: { - noncurrentDays: '10', - storageClass: 'IA' - } -}]); -console.log(result) +const obj1 = { + key: 'key1', + versionId: 'versionId1' +} +const obj2 = { + key: 'key2', + versionId: 'versionId2' +} +const result = await store.deleteMulti([obj1, obj2]); ``` -### .getBucketLifecycle(name[, options]) +### .list(query[, options]) -Get the bucket object lifecycle. +List objects in the bucket. parameters: -- name {String} bucket name +- [query] {Object} query parameters, default is `null` + - [prefix] {String} search object using `prefix` key + - [marker] {String} search start from `marker`, including `marker` key + - [delimiter] {String} delimiter search scope + e.g. `/` only search current dir, not including subdir + - [max-keys] {String|Number} max objects, default is `100`, limit to `1000` - [options] {Object} optional parameters - [timeout] {Number} the operation timeout -Success will return: +Success will return objects list on `objects` properties. 
-- rules {Array} the lifecycle rule list +- objects {Array} object meta info list + Each `ObjectMeta` will contains blow properties: + - name {String} object name on oss + - lastModified {String} object last modified GMT date, e.g.: `2015-02-19T08:39:44.000Z` + - etag {String} object etag contains `"`, e.g.: `"5B3C1A2E053D763E1B002CC607C5A0FE"` + - type {String} object type, e.g.: `Normal` + - size {Number} object size, e.g.: `344606` + - storageClass {String} storage class type, e.g.: `Standard` + - owner {Object} object owner, including `id` and `displayName` +- prefixes {Array} prefix list +- isTruncated {Boolean} truncate or not +- nextMarker {String} next marker string - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt {Number} request total use time (ms) -### .deleteBucketLifecycle(name[, options]) +example: -Delete the bucket object lifecycle. +- List top 10 objects -parameters: +```js +const result = await store.list(); +console.log(result.objects); +``` -- name {String} bucket name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout +- List `fun/` dir including subdirs objects -Success will return: +```js +const result = await store.list({ + prefix: 'fun/' +}); +console.log(result.objects); +``` -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) +- List `fun/` dir objects, not including subdirs ---- +```js +const result = await store.list({ + prefix: 'fun/', + delimiter: '/' +}); +console.log(result.objects); +``` -### .putBucketCORS(name, rules[, options]) +### .listV2(query[, options]) -Set CORS rules of the bucket object +List objects in the bucket.(recommended) parameters: -- name {String} bucket name -- rules {Array} rule config list, each `Rule` will contains below properties: - - 
allowedOrigin {String/Array} configure for Access-Control-Allow-Origin header - - allowedMethod {String/Array} configure for Access-Control-Allow-Methods header - - [allowedHeader] {String/Array} configure for Access-Control-Allow-Headers header - - [exposeHeader] {String/Array} configure for Access-Control-Expose-Headers header - - [maxAgeSeconds] {String} configure for Access-Control-Max-Age header +- [query] {Object} query parameters, default is `null` + - [prefix] {String} search object using `prefix` key + - [continuation-token] (continuationToken) {String} search start from `continuationToken`, including `continuationToken` key + - [delimiter] {String} delimiter search scope + e.g. `/` only search current dir, not including subdir + - [max-keys] {String|Number} max objects, default is `100`, limit to `1000` + - [start-after] {String} specifies the Start-after value from which to start the list. The names of objects are returned in alphabetical order. + - [fetch-owner] {Boolean} specifies whether to include the owner information in the response. - [options] {Object} optional parameters - [timeout] {Number} the operation timeout -Success will return: +Success will return objects list on `objects` properties. +- objects {Array} object meta info list + Each `ObjectMeta` will contains blow properties: + - name {String} object name on oss + - url {String} resource url + - lastModified {String} object last modified GMT date, e.g.: `2015-02-19T08:39:44.000Z` + - etag {String} object etag contains `"`, e.g.: `"5B3C1A2E053D763E1B002CC607C5A0FE"` + - type {String} object type, e.g.: `Normal` + - size {Number} object size, e.g.: `344606` + - storageClass {String} storage class type, e.g.: `Standard` + - owner {Object|null} object owner, including `id` and `displayName` +- prefixes {Array} prefix list +- isTruncated {Boolean} truncate or not +- nextContinuationToken {String} next continuation-token string +- keyCount {Number} The number of keys returned for this request. 
If Delimiter is specified, KeyCount is the sum of the elements in Key and CommonPrefixes. - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt {Number} request total use time (ms) -example: +- List top 10 objects ```js -store.putBucketCORS('hello', [ - { - allowedOrigin: '*', - allowedMethod: [ - 'GET', - 'HEAD', - ], - } -]).then((result) => {}); +const result = await store.listV2({ + 'max-keys': 10 +}); +console.log(result.objects); ``` -### .getBucketCORS(name[, options]) +- List `fun/` dir including subdirs objects -Get CORS rules of the bucket object. +```js +const result = await store.listV2({ + prefix: 'fun/' +}); +console.log(result.objects); +``` -parameters: +- List `fun/` dir objects, not including subdirs -- name {String} bucket name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout +```js +const result = await store.listV2({ + prefix: 'fun/', + delimiter: '/' +}); +console.log(result.objects); +``` -Success will return: +- List `a/` dir objects, after `a/b` and not include `a/b` -- rules {Array} the CORS rule list -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) +```js +const result = await store.listV2({ + delimiter: '/', + prefix: 'a/', + 'start-after': 'a/b' +}); +console.log(result.objects); +``` -### .deleteBucketCORS(name[, options]) +### .getBucketVersions(query[, options]) -Delete CORS rules of the bucket object. +List the version information of all objects in the bucket, including the delete marker (Delete Marker). 
parameters: -- name {String} bucket name +- [query] {Object} query parameters, default is `null` + - [prefix] {String} search object using `prefix` key + - [versionIdMarker] {String} set the result to return from the version ID marker of the key marker object and sort by the versions + - [keyMarker] {String} search start from `keyMarker`, including `keyMarker` key + - [encodingType] {String} specifies that the returned content is encoded, and specifies the type of encoding + - [delimiter] {String} delimiter search scope + e.g. `/` only search current dir, not including subdir + - [maxKeys] {String|Number} max objects, default is `100`, limit to `1000` - [options] {Object} optional parameters - [timeout] {Number} the operation timeout -Success will return: +Success will return objects list on `objects` properties. +- objects {Array} object meta info list + Each `ObjectMeta` will contain the below properties: + - name {String} object name on oss + - lastModified {String} object last modified GMT date, e.g.: `2015-02-19T08:39:44.000Z` + - etag {String} object etag contains `"`, e.g.: `"5B3C1A2E053D763E1B002CC607C5A0FE"` + - type {String} object type, e.g.: `Normal` + - size {Number} object size, e.g.: `344606` + - isLatest {Boolean} + - versionId {String} object versionId + - storageClass {String} storage class type, e.g.: `Standard` + - owner {Object} object owner, including `id` and `displayName` +- deleteMarker {Array} object delete marker info list + Each `ObjectDeleteMarker` will contain the below properties: + - name {String} object name on oss + - lastModified {String} object last modified GMT date, e.g.: `2015-02-19T08:39:44.000Z` + - versionId {String} object versionId +- isTruncated {Boolean} truncate or not +- nextKeyMarker (nextMarker) {String} next marker string +- nextVersionIdMarker (NextVersionIdMarker) {String} next version ID marker string - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt 
{Number} request total use time (ms) -### .getBucketRequestPayment(bucketName[, options]) +example: -get RequestPayment value of the bucket object. +- View all versions of objects and deleteMarker of bucket -parameters: +```js +const result = await store.getBucketVersions(); +console.log(result.objects); +console.log(result.deleteMarker); +``` -- bucketName {String} bucket name -- [options] {Object} optional parameters +- List from key-marker -Success will return: +```js +const result = await store.getBucketVersions({ + 'keyMarker': 'keyMarker' +}); +console.log(result.objects); +``` -- status {Number} response status -- payer {String} payer, BucketOwner or Requester -- res {Object} response info, including - - data {Buffer} xml +- List from the version-id-marker of key-marker ---- +```js +const result = await store.getBucketVersions({ + 'versionIdMarker': 'versionIdMarker', + 'keyMarker': 'keyMarker' +}); +console.log(result.objects); +console.log(result.deleteMarker); +``` -### .putBucketRequestPayment(bucketName, payer[, options]) +### .signatureUrl(name[, options]) -put RequestPayment value of the bucket object. +Create a signature url for download or upload object. When you put an object with signatureUrl, you need to pass `Content-Type`. Please look at the example. parameters: -- bucketName {String} -- payer {String} payer +- name {String} object name store on OSS - [options] {Object} optional parameters + - [expires] {Number} after expires seconds, the url will become invalid, default is `1800` + - [method] {String} the HTTP method, default is 'GET' + - [Content-Type] {String} set the request content type + - [process] {String} image process params, will send with `x-oss-process` + e.g.: `{process: 'image/resize,w_200'}` + - [trafficLimit] {Number} traffic limit, range: `819200`~`838860800`. + - [subResource] {Object} additional signature parameters in url. 
+ - [response] {Object} set the response headers for download + - [content-type] {String} set the response content type + - [content-disposition] {String} set the response content disposition + - [cache-control] {String} set the response cache control + - See more: + - [callback] {Object} set the callback for the operation + - url {String} set the url for callback + - [host] {String} set the host for callback + - body {String} set the body for callback + - [contentType] {String} set the type for body + - [customValue] {Object} set the custom value for callback,eg. {var1: value1,var2:value2} -Success will return: - -- status {Number} response status -- res {Object} response info - ---- - -### .putBucketEncryption(name, rules) - -put BucketEncryption value of the bucket object. - -parameters: - -- name {String} bucket name -- [rules] {Object} parameters - - SSEAlgorithm {String} encryption type, expect AES256 or KMS - - {KMSMasterKeyID} {String} needed when encryption type is KMS - -Success will return: - -- status {Number} response status -- res {Object} response info - ---- - -### .getBucketEncryption(name) - -get BucketEncryption rule value of the bucket object. - -parameters: - -- name {String} bucket name - -Success will return: - -- status {Number} response status -- res {Object} response info -- encryption {Object} rules - - SSEAlgorithm {String} encryption type, AES256 or KMS - - {KMSMasterKeyID} {String} will be return when encryption type is KMS - ---- - -### .deleteBucketEncryption(name) - -delete BucketEncryption rule value of the bucket object. - -parameters: - -- name {String} bucket name - -Success will return: - -- status {Number} response status -- res {Object} response info - ---- - -### .putBucketTags(name, tag[, options]) - -Adds tags for a bucket or modify the tags for a bucket. - -parameters: - -- name {String} the object name -- tag {Object} tag, eg. 
`{var1: value1,var2:value2}` -- [options] {Object} optional args - -Success will return: - -- status {Number} response status -- res {Object} response info - ---- - -### .getBucketTags(name[, options]) - -Obtains the tags for a bucket. - -parameters: - -- name {String} the object name -- [options] {Object} optional args - -Success will return: - -- tag {Object} the tag of object -- res {Object} response info - ---- - -### .deleteBucketTags(name[, options]) - -Deletes the tags added for a bucket. - -parameters: - -- name {String} the object name -- [options] {Object} optional args - -Success will return: - -- status {Number} response status -- res {Object} response info - ---- - -### .putBucketPolicy(name, policy[, options]) - -Adds or modify policy for a bucket. - -parameters: - -- name {String} the bucket name -- policy {Object} bucket policy -- [options] {Object} optional args - -Success will return: - -- status {Number} response status -- res {Object} response info - -example: - -```js -const policy = { - Version: '1', - Statement: [ - { - Action: ['oss:PutObject', 'oss:GetObject'], - Effect: 'Deny', - Principal: ['1234567890'], - Resource: ['acs:oss:*:1234567890:*/*'] - } - ] -}; -const result = await store.putBucketPolicy(bucket, policy); -console.log(result); -``` - ---- - -### .getBucketPolicy(name[, options]) - -Obtains the policy for a bucket. - -parameters: - -- name {String} the bucket name -- [options] {Object} optional args - -Success will return: - -- policy {Object} the policy of bucket, if not exist, the value is null -- res {Object} response info -- status {Number} response status - ---- - -### .deleteBucketPolicy(name[, options]) - -Deletes the policy added for a bucket. 
- -parameters: - -- name {String} the bucket name -- [options] {Object} optional args - -Success will return: - -- status {Number} response status -- res {Object} response info - ---- - -### .getBucketVersioning(name[, options]) - -Obtains the version status of an object - -parameters: - -- name {String} the bucket name -- [options] {Object} optional args - -Success will return: - -- status {Number} response status -- versionStatus {String | undefined} version status, `Suspended` or `Enabled`. default value: `undefined` -- res {Object} response info - ---- - -### .putBucketVersioning(name, status[, options]) - -set the version status of an object - -parameters: - -- name {String} the bucket name -- status {String} version status, allow values: `Enabled` or `Suspended` -- [options] {Object} optional args - -Success will return: - -- status {Number} response status -- res {Object} response info - ---- - -### .getBucketInventory(name, inventoryId[, options]) - -get bucket inventory by inventory-id - -parameters: - -- name {String} the bucket name -- inventoryId {String} inventory-id -- [options] {Object} optional args - -Success will return: - -- inventory {Inventory} -- status {Number} response status -- res {Object} response info - -```js -async function getBucketInventoryById() { - try { - const result = await store.getBucketInventory('bucket', 'inventoryid'); - console.log(result.inventory) - } catch (err) { - console.log(err) - } -} - -getBucketInventoryById(); -``` - -### putBucketInventory(name, inventory[, options]) - -set bucket inventory - -parameters: - -- name {String} the bucket name -- inventory {Inventory} inventory config -- [options] {Object} optional args - -Success will return: - -- status {Number} response status -- res {Object} response info - -```ts -type Field = 'Size | LastModifiedDate | ETag | StorageClass | IsMultipartUploaded | EncryptionStatus'; -interface Inventory { - id: string; - isEnabled: true | false; - prefix?: string; - 
OSSBucketDestination: { - format: 'CSV'; - accountId: string; - rolename: string; - bucket: string; - prefix?: string; - encryption?: - | {'SSE-OSS': ''} - | { - 'SSE-KMS': { - keyId: string; - }; - }; - }; - frequency: 'Daily' | 'Weekly'; - includedObjectVersions: 'Current' | 'All'; - optionalFields?: { - field?: Field[]; - }; -} -``` - -```js -const inventory = { - id: 'default', - isEnabled: false, // `true` | `false` - prefix: 'ttt', // filter prefix - OSSBucketDestination: { - format: 'CSV', - accountId: '1817184078010220', - rolename: 'AliyunOSSRole', - bucket: 'your bucket', - prefix: 'test', - //encryption: {'SSE-OSS': ''}, - /* - encryption: { - 'SSE-KMS': { - keyId: 'test-kms-id'; - };, - */ - }, - frequency: 'Daily', // `WEEKLY` | `Daily` - includedObjectVersions: 'All', // `All` | `Current` - optionalFields: { - field: ["Size", "LastModifiedDate", "ETag", "StorageClass", "IsMultipartUploaded", "EncryptionStatus"] - }, -} - -async function putInventory(){ - const bucket = 'Your Bucket Name'; - try { - await store.putBucketInventory(bucket, inventory); - } catch(err) { - console.log(err); - } -} - -putInventory() -``` - -### deleteBucketInventory(name, inventoryId[, options]) - -delete bucket inventory by inventory-id - -parameters: - -- name {String} the bucket name -- inventoryId {String} inventory-id -- [options] {Object} optional args - -Success will return: - -- status {Number} response status -- res {Object} response info - -### listBucketInventory(name[, options]) - -list bucket inventory - -parameters: - -- name {String} the bucket name -- [options] {Object} optional args - - continuationToken used by search next page - -Success will return: - -- status {Number} response status -- res {Object} response info - -example: - -```js -async function listBucketInventory() { - const bucket = 'Your Bucket Name'; - let nextContinuationToken; - // list all inventory of the bucket - do { - const result = await store.listBucketInventory(bucket, 
nextContinuationToken); - console.log(result.inventoryList); - nextContinuationToken = result.nextContinuationToken; - } while (nextContinuationToken) -} - -listBucketInventory(); -``` - -### .abortBucketWorm(name[, options]) - -used to delete an unlocked retention policy. - -parameters: - -- name {String} the bucket name -- [options] {Object} optional args - -Success will return: - -- status {Number} response status -- res {Object} response info - ---- - -### .completeBucketWorm(name, wormId[, options]) - -used to lock a retention policy. - -parameters: - -- name {String} the bucket name -- wormId {String} worm id -- [options] {Object} optional args - -Success will return: - -- status {Number} response status -- res {Object} response info - ---- - -### .extendBucketWorm(name, wormId, days[, options]) - - used to extend the retention period of objects in a bucket whose retention policy is locked. - -parameters: - -- name {String} the bucket name -- wormId {String} worm id -- days {String | Number} retention days -- [options] {Object} optional args - -Success will return: - -- status {Number} response status -- res {Object} response info - ---- - -### .getBucketWorm(name[, options]) - - used to query the retention policy information of the specified bucket. - -parameters: - -- name {String} the bucket name -- [options] {Object} optional args - -Success will return: - -- wormId {String} worm id -- state {String} `Locked` or `InProgress` -- days {String} retention days -- creationDate {String} -- status {Number} response status -- res {Object} response info - ---- - -### .initiateBucketWorm(name, days[, options]) - -create a retention policy. 
- -parameters: - -- name {String} the bucket name -- days {String | Number}} set retention days -- [options] {Object} optional args - -Success will return: - -- wormId {String} worm id -- status {Number} response status -- res {Object} response info - ---- - -## Object Operations - -All operations function return Promise, except `signatureUrl`. - -### .put(name, file[, options]) - -Add an object to the bucket. - -parameters: - -- name {String} object name store on OSS -- file {String|Buffer|ReadStream} object local path, content buffer or ReadStream content instance use in Node, Blob and html5 File -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [mime] {String} custom mime, will send with `Content-Type` entity header - - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string - e.g.: `{ uid: 123, pid: 110 }` - - [callback] {Object} The callback parameter is composed of a JSON string encoded in Base64,detail [see](https://www.alibabacloud.com/help/doc-detail/31989.htm)
- - url {String} After a file is uploaded successfully, the OSS sends a callback request to this URL. - - [host] {String} The host header value for initiating callback requests. - - body {String} The value of the request body when a callback is initiated, for example, `key=${key}&etag=${etag}&my_var=${x:my_var}`. - - [contentType] {String} The Content-Type of the callback requests initiatiated, It supports application/x-www-form-urlencoded and application/json, and the former is the default value. - - [customValue] {Object} Custom parameters are a map of key-values
- e.g.: - - ```js - var customValue = {var1: 'value1', var2: 'value2'} - ``` - - - [headers] {Object} extra headers - - 'Cache-Control' cache control for download, e.g.: `Cache-Control: public, no-cache` - - 'Content-Disposition' object name for download, e.g.: `Content-Disposition: somename` - - 'Content-Encoding' object content encoding for download, e.g.: `Content-Encoding: gzip` - - 'Expires' expires time for download, an absolute date and time. e.g.: `Tue, 08 Dec 2020 13:49:43 GMT` - - See more: [PutObject](https://help.aliyun.com/document_detail/31978.html#title-yxe-96d-x61) - -Success will return the object information. - -object: - -- name {String} object name -- data {Object} callback server response data, sdk use JSON.parse() return -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -example: - -- Add an object through local file path - -```js -const filepath = '/home/ossdemo/demo.txt'; -store.put('ossdemo/demo.txt', filepath).then((result) => { - console.log(result); -}); - -{ - name: 'ossdemo/demo.txt', - res: { - status: 200, - headers: { - date: 'Tue, 17 Feb 2015 13:28:17 GMT', - 'content-length': '0', - connection: 'close', - etag: '"BF7A03DA01440845BC5D487B369BC168"', - server: 'AliyunOSS', - 'x-oss-request-id': '54E341F1707AA0275E829244' - }, - size: 0, - rt: 92 - } -} -``` - -- Add an object through content buffer - -```js -store.put('ossdemo/buffer', Buffer.from('foo content')).then((result) => { - console.log(result); -}); - -{ - name: 'ossdemo/buffer', - url: 'http://demo.oss-cn-hangzhou.aliyuncs.com/ossdemo/buffer', - res: { - status: 200, - headers: { - date: 'Tue, 17 Feb 2015 13:28:17 GMT', - 'content-length': '0', - connection: 'close', - etag: '"xxx"', - server: 'AliyunOSS', - 'x-oss-request-id': '54E341F1707AA0275E829243' - }, - size: 0, - rt: 92 - } -} -``` - -- Add an object through readstream - 
-```js -const filepath = '/home/ossdemo/demo.txt'; -store.put('ossdemo/readstream.txt', fs.createReadStream(filepath)).then((result) => { - console.log(result); -}); - -{ - name: 'ossdemo/readstream.txt', - url: 'http://demo.oss-cn-hangzhou.aliyuncs.com/ossdemo/readstream.txt', - res: { - status: 200, - headers: { - date: 'Tue, 17 Feb 2015 13:28:17 GMT', - 'content-length': '0', - connection: 'close', - etag: '"BF7A03DA01440845BC5D487B369BC168"', - server: 'AliyunOSS', - 'x-oss-request-id': '54E341F1707AA0275E829242' - }, - size: 0, - rt: 92 - } -} -``` - -### .putStream(name, stream[, options]) - -Add a stream object to the bucket. - -parameters: - -- name {String} object name store on OSS -- stream {ReadStream} object ReadStream content instance -- [options] {Object} optional parameters - - [contentLength] {Number} the stream length, `chunked encoding` will be used if absent - - [timeout] {Number} the operation timeout - - [mime] {String} custom mime, will send with `Content-Type` entity header - - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string - e.g.: `{ uid: 123, pid: 110 }` - - [callback] {Object} The callback parameter is composed of a JSON string encoded in Base64,detail [see](https://www.alibabacloud.com/help/doc-detail/31989.htm)
- - url {String} After a file is uploaded successfully, the OSS sends a callback request to this URL. - - [host] {String} The host header value for initiating callback requests. - - body {String} The value of the request body when a callback is initiated, for example, key=${key}&etag=${etag}&my_var=${x:my_var}. - - [contentType] {String} The Content-Type of the callback requests initiatiated, It supports application/x-www-form-urlencoded and application/json, and the former is the default value. - - [customValue] {Object} Custom parameters are a map of key-values
- e.g.: - - ```js - var customValue = {var1: 'value1', var2: 'value2'} - ``` - - - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) - - 'Cache-Control' cache control for download, e.g.: `Cache-Control: public, no-cache` - - 'Content-Disposition' object name for download, e.g.: `Content-Disposition: somename` - - 'Content-Encoding' object content encoding for download, e.g.: `Content-Encoding: gzip` - - 'Expires' expires time for download, an absolute date and time. e.g.: `Tue, 08 Dec 2020 13:49:43 GMT` - -Success will return the object information. - -object: - -- name {String} object name -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -example: - -- Add an object through readstream - -```js -const filepath = '/home/ossdemo/demo.txt'; -store.putStream('ossdemo/readstream.txt', fs.createReadStream(filepath)).then((result) => { - console.log(result); -}); - -{ - name: 'ossdemo/readstream.txt', - url: 'http://demo.oss-cn-hangzhou.aliyuncs.com/ossdemo/readstream.txt', - res: { - status: 200, - headers: { - date: 'Tue, 17 Feb 2015 13:28:17 GMT', - 'content-length': '0', - connection: 'close', - etag: '"BF7A03DA01440845BC5D487B369BC168"', - server: 'AliyunOSS', - 'x-oss-request-id': '54E341F1707AA0275E829242' - }, - size: 0, - rt: 92 - } -} -``` - -### .append(name, file[, options]) - -Append an object to the bucket, it's almost same as put, but it can add content to existing object rather than override it. 
- -All parameters are same as put except for options.position - -- name {String} object name store on OSS -- file {String|Buffer|ReadStream} object local path, content buffer or ReadStream content instance -- [options] {Object} optional parameters - - [position] {String} specify the position which is the content length of the latest object - - [timeout] {Number} the operation timeout - - [mime] {String} custom mime, will send with `Content-Type` entity header - - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string - e.g.: `{ uid: 123, pid: 110 }` - - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) - - 'Cache-Control' cache control for download, e.g.: `Cache-Control: public, no-cache` - - 'Content-Disposition' object name for download, e.g.: `Content-Disposition: somename` - - 'Content-Encoding' object content encoding for download, e.g.: `Content-Encoding: gzip` - - 'Expires' expires time for download, an absolute date and time. e.g.: `Tue, 08 Dec 2020 13:49:43 GMT` - -object: - -- name {String} object name -- url {String} the url of oss -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- nextAppendPosition {String} the next position - -example: - -```js -let object = await store.append('ossdemo/buffer', Buffer.from('foo')); - -// append content to the existing object -object = await store.append('ossdemo/buffer', Buffer.from('bar'), { - position: object.nextAppendPosition, -}); -``` - -### .getObjectUrl(name[, baseUrl]) - -Get the Object url. -If provide `baseUrl`, will use `baseUrl` instead the default `endpoint`. - -e.g.: - -```js -const cdnUrl = store.getObjectUrl('foo/bar.jpg', 'https://mycdn.domian.com'); -// cdnUrl should be `https://mycdn.domian.com/foo/bar.jpg` -``` - -### .generateObjectUrl(name[, baseUrl]) - -Get the Object url. 
-If provide `baseUrl`, will use `baseUrl` instead the default `bucket and endpoint`. -Suggest use generateObjectUrl instead of getObjectUrl. - -e.g.: - -```js -const url = store.generateObjectUrl('foo/bar.jpg'); -// cdnUrl should be `https://${bucketname}.${endpotint}foo/bar.jpg` - -const cdnUrl = store.generateObjectUrl('foo/bar.jpg', 'https://mycdn.domian.com'); -// cdnUrl should be `https://mycdn.domian.com/foo/bar.jpg` -``` - -### .head(name[, options]) - -Head an object and get the meta info. - -parameters: - -- name {String} object name store on OSS -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [versionId] {String} the version id of history object - - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) - - 'If-Modified-Since' object modified after this time will return 200 and object meta, - otherwise return 304 not modified - - 'If-Unmodified-Since' object modified before this time will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-Match' object etag equal this will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-None-Match' object etag not equal this will return 200 and object meta, - otherwise return 304 not modified - -Success will return the object's meta information. - -object: - -- status {Number} response status, maybe 200 or 304 -- meta {Object} object user meta, if not set on `put()`, will return null. 
- If return status 304, meta will be null too -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - [x-oss-version-id] return in multiversion - - size {Number} response size - - rt {Number} request total use time (ms) - -example: - -- Head an exists object and get user meta - -```js -await this.store.put('ossdemo/head-meta', Buffer.from('foo'), { - meta: { - uid: 1, - path: 'foo/demo.txt' - } -}); -const object = await this.store.head('ossdemo/head-meta'); -console.log(object); - -{ - status: 200, - meta: { - uid: '1', - path: 'foo/demo.txt' - }, - res: { ... } -} -``` - -- Head a not exists object - -```js -const object = await this.store.head('ossdemo/head-meta'); -// will throw NoSuchKeyError -``` - -### .getObjectMeta(name[, options]) - -Get an object meta info include ETag、Size、LastModified and so on, not return object content. - -parameters: - -- name {String} object name store on OSS -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [versionId] {String} the version id of history object - -Success will return the object's meta information. - -object: - -- status {Number} response status -- res {Object} response info, including - - headers {Object} response headers - -example: - -- Head an exists object and get object meta info - -```js -await this.store.put('ossdemo/object-meta', Buffer.from('foo')); -const object = await this.store.getObjectMeta('ossdemo/object-meta'); -console.log(object); - -{ - status: 200, - res: { ... } -} -``` - -### .get(name[, file, options]) - -Get an object from the bucket. - -parameters: - -- name {String} object name store on OSS -- [file] {String|WriteStream} file path or WriteStream instance to store the content - If `file` is null or ignore this parameter, function will return info contains `content` property. 
-- [options] {Object} optional parameters - - [versionId] {String} the version id of history object - - [timeout] {Number} the operation timeout - - [process] {String} image process params, will send with `x-oss-process` - e.g.: `{process: 'image/resize,w_200'}` - - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) - - 'Range' get specifying range bytes content, e.g.: `Range: bytes=0-9` - - 'If-Modified-Since' object modified after this time will return 200 and object meta, - otherwise return 304 not modified - - 'If-Unmodified-Since' object modified before this time will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-Match' object etag equal this will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-None-Match' object etag not equal this will return 200 and object meta, - otherwise return 304 not modified - -Success will return the info contains response. - -object: - -- [content] {Buffer} file content buffer if `file` parameter is null or ignore -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -If object not exists, will throw NoSuchKeyError. 
- -example: - -- Get an exists object and store it to the local file - -```js -const filepath = '/home/ossdemo/demo.txt'; -await store.get('ossdemo/demo.txt', filepath); -``` - -_ Store object to a writestream - -```js -await store.get('ossdemo/demo.txt', somestream); -``` - -- Get an object content buffer - -```js -const result = await store.get('ossdemo/demo.txt'); -console.log(Buffer.isBuffer(result.content)); -``` - -- Get a processed image and store it to the local file - -```js -const filepath = '/home/ossdemo/demo.png'; -await store.get('ossdemo/demo.png', filepath, {process: 'image/resize,w_200'}); -``` - -- Get a not exists object - -```js -const filepath = '/home/ossdemo/demo.txt'; -await store.get('ossdemo/not-exists-demo.txt', filepath); -// will throw NoSuchKeyError -``` - -- Get a historic version object - -```js -const filepath = '/home/ossdemo/demo.txt'; -const versionId = 'versionId string'; -await store.get('ossdemo/not-exists-demo.txt', filepath, { - versionId -}); -``` - -### .getStream(name[, options]) - -Get an object read stream. - -parameters: - -- name {String} object name store on OSS -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [process] {String} image process params, will send with `x-oss-process` - - [headers] {Object} extra headers - - 'If-Modified-Since' object modified after this time will return 200 and object meta, - otherwise return 304 not modified - - 'If-Unmodified-Since' object modified before this time will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-Match' object etag equal this will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-None-Match' object etag not equal this will return 200 and object meta, - otherwise return 304 not modified - -Success will return the stream instance and response info. - -object: - -- stream {ReadStream} readable stream instance - if response status is not 200, stream will be `null`. 
-- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -If object not exists, will throw NoSuchKeyError. - -example: - -- Get an exists object stream - -```js -const result = await store.getStream('ossdemo/demo.txt'); -result.stream.pipe(fs.createWriteStream('some file.txt')); -``` - -### .delete(name[, options]) - -Delete an object from the bucket. - -parameters: - -- name {String} object name store on OSS -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [versionId] {String} the version id of history object - -Success will return the info contains response. - -object: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -If delete object not exists, will also delete success. - -example: - -- Delete an exists object - -```js -await store.delete('ossdemo/someobject'); -``` - -- Delete a not exists object - -```js -await store.delete('ossdemo/some-not-exists-object'); -``` - -- Delete a history object or deleteMarker - -```js -const versionId = 'versionId'; -await store.delete('ossdemo/some-not-exists-object', { versionId }); -``` - -### .copy(name, sourceName[, sourceBucket, options]) - -Copy an object from `sourceName` to `name`. - -parameters: - -- name {String} object name store on OSS -- sourceName {String} source object name -- [sourceBucket] {String} source Bucket. if doesn't exist,`sourceBucket` is same bucket. -- [options] {Object} optional parameters - - [versionId] {String} the version id of history object - - [timeout] {Number} the operation timeout - - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string - e.g.: `{ uid: 123, pid: 110 }` - If the `meta` set, will override the source object meta. 
- - [headers] {Object} extra headers - - 'If-Match' do copy if source object etag equal this, - otherwise throw PreconditionFailedError - - 'If-None-Match' do copy if source object etag not equal this, - otherwise throw PreconditionFailedError - - 'If-Modified-Since' do copy if source object modified after this time, - otherwise throw PreconditionFailedError - - 'If-Unmodified-Since' do copy if source object modified before this time, - otherwise throw PreconditionFailedError - - See more: [CopyObject](https://help.aliyun.com/document_detail/31979.html?#title-tzy-vxc-ncx) - -Success will return the copy result in `data` property. - -object: - -- data {Object} copy result - - lastModified {String} object last modified GMT string - - etag {String} object etag contains `"`, e.g.: `"5B3C1A2E053D763E1B002CC607C5A0FE"` -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -If source object not exists, will throw NoSuchKeyError. - -example: - -- Copy same bucket object - -```js -store.copy('newName', 'oldName').then((result) => { - console.log(result); -}); -``` - -- Copy other bucket object - -```js -store.copy('logo.png', 'logo.png', 'other-bucket').then((result) => { - console.log(result); -}); -``` - -- Copy historic object - -```js -const versionId = 'your verisonId' -store.copy('logo.png', 'logo.png', 'other-bucket', { versionId }).then((result) => { - console.log(result); -}); -``` - -### .putMeta(name, meta[, options]) - -Set an exists object meta. - -parameters: - -- name {String} object name store on OSS -- meta {Object} user meta, will send with `x-oss-meta-` prefix string - e.g.: `{ uid: 123, pid: 110 }` - If `meta: null`, will clean up the exists meta -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the putMeta result in `data` property. 
-
-- data {Object} putMeta result
-  - lastModified {String} object last modified GMT date, e.g.: `2015-02-19T08:39:44.000Z`
-  - etag {String} object etag contains `"`, e.g.: `"5B3C1A2E053D763E1B002CC607C5A0FE"`
-- res {Object} response info, including
-  - status {Number} response status
-  - headers {Object} response headers
-  - size {Number} response size
-  - rt {Number} request total use time (ms)
-
-If object not exists, will throw NoSuchKeyError.
-
-example:
-
-- Update exists object meta
-
-```js
-const result = await store.putMeta('ossdemo.txt', {
-  uid: 1, pid: 'p123'
-});
-console.log(result);
-```
-
-- Clean up object meta
-
-```js
-await store.putMeta('ossdemo.txt', null);
-```
-
-### .deleteMulti(names[, options])
-
-Delete multi objects in one request.
-
-parameters:
-
-- names {Array} object names, max 1000 objects in once.
-  - key {String} object name
-  - [versionId] {String} the version id of history object or deleteMarker
-- [options] {Object} optional parameters
-  - [quiet] {Boolean} quiet mode or verbose mode, default is `false`, verbose mode
-      quiet mode: if all objects are deleted successfully, return empty response.
-        otherwise return delete error object results.
-      verbose mode: return all object delete results.
-  - [timeout] {Number} the operation timeout
-
-Success will return delete success objects in `deleted` property.
- -- [deleted] {Array} deleted object or deleteMarker info list - - [Key] {String} object name - - [VersionId] {String} object versionId - - [DeleteMarker] {String} generate or delete marker - - [DeleteMarkerVersionId] {String} marker versionId -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -example: - -- Delete multi objects in quiet mode - -```js -const result = await store.deleteMulti(['obj1', 'obj2', 'obj3'], { - quiet: true -}); -``` - -- Delete multi objects in verbose mode - -```js -const result = await store.deleteMulti(['obj1', 'obj2', 'obj3']); -``` - -- Delete multi objects in multiversion - -```js -const obj1 = { - key: 'key1', - versionId: 'versionId1' -} -const obj2 = { - key: 'key2', - versionId: 'versionId2' -} -const result = await store.deleteMulti([obj1, obj2]); -``` - -### .list(query[, options]) - -List objects in the bucket. - -parameters: - -- [query] {Object} query parameters, default is `null` - - [prefix] {String} search object using `prefix` key - - [marker] {String} search start from `marker`, including `marker` key - - [delimiter] {String} delimiter search scope - e.g. `/` only search current dir, not including subdir - - [max-keys] {String|Number} max objects, default is `100`, limit to `1000` -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return objects list on `objects` properties. 
- -- objects {Array} object meta info list - Each `ObjectMeta` will contains blow properties: - - name {String} object name on oss - - lastModified {String} object last modified GMT date, e.g.: `2015-02-19T08:39:44.000Z` - - etag {String} object etag contains `"`, e.g.: `"5B3C1A2E053D763E1B002CC607C5A0FE"` - - type {String} object type, e.g.: `Normal` - - size {Number} object size, e.g.: `344606` - - storageClass {String} storage class type, e.g.: `Standard` - - owner {Object} object owner, including `id` and `displayName` -- prefixes {Array} prefix list -- isTruncated {Boolean} truncate or not -- nextMarker {String} next marker string -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -example: - -- List top 10 objects - -```js -const result = await store.list(); -console.log(result.objects); -``` - -- List `fun/` dir including subdirs objects - -```js -const result = await store.list({ - prefix: 'fun/' -}); -console.log(result.objects); -``` - -- List `fun/` dir objects, not including subdirs - -```js -const result = await store.list({ - prefix: 'fun/', - delimiter: '/' -}); -console.log(result.objects); -``` - -### .listV2(query[, options]) - -List objects in the bucket.(recommended) - -parameters: - -- [query] {Object} query parameters, default is `null` - - [prefix] {String} search object using `prefix` key - - [continuation-token] (continuationToken) {String} search start from `continuationToken`, including `continuationToken` key - - [delimiter] {String} delimiter search scope - e.g. `/` only search current dir, not including subdir - - [max-keys] {String|Number} max objects, default is `100`, limit to `1000` - - [start-after] {String} specifies the Start-after value from which to start the list. The names of objects are returned in alphabetical order. 
- - [fetch-owner] {Boolean} specifies whether to include the owner information in the response. -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return objects list on `objects` properties. - -- objects {Array} object meta info list - Each `ObjectMeta` will contains blow properties: - - name {String} object name on oss - - url {String} resource url - - lastModified {String} object last modified GMT date, e.g.: `2015-02-19T08:39:44.000Z` - - etag {String} object etag contains `"`, e.g.: `"5B3C1A2E053D763E1B002CC607C5A0FE"` - - type {String} object type, e.g.: `Normal` - - size {Number} object size, e.g.: `344606` - - storageClass {String} storage class type, e.g.: `Standard` - - owner {Object|null} object owner, including `id` and `displayName` -- prefixes {Array} prefix list -- isTruncated {Boolean} truncate or not -- nextContinuationToken {String} next continuation-token string -- keyCount {Number} The number of keys returned for this request. If Delimiter is specified, KeyCount is the sum of the elements in Key and CommonPrefixes. 
-- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -- List top 10 objects - -```js -const result = await store.listV2({ - 'max-keys': 10 -}); -console.log(result.objects); -``` - -- List `fun/` dir including subdirs objects - -```js -const result = await store.listV2({ - prefix: 'fun/' -}); -console.log(result.objects); -``` - -- List `fun/` dir objects, not including subdirs - -```js -const result = await store.listV2({ - prefix: 'fun/', - delimiter: '/' -}); -console.log(result.objects); -``` - -- List `a/` dir objects, after `a/b` and not include `a/b` - -```js -const result = await store.listV2({ - delimiter: '/', - prefix: 'a/', - 'start-after': 'a/b' -}); -console.log(result.objects); -``` - -### .getBucketVersions(query[, options]) - -List the version information of all objects in the bucket, including the delete marker (Delete Marker). - -parameters: - -- [query] {Object} query parameters, default is `null` - - [prefix] {String} search object using `prefix` key - - [versionIdMarker] {String} set the result to return from the version ID marker of the key marker object and sort by the versions - - [keyMarker] {String} search start from `keyMarker`, including `keyMarker` key - - [encodingType] {String} specifies that the returned content is encoded, and specifies the type of encoding - - [delimiter] {String} delimiter search scope - e.g. `/` only search current dir, not including subdir - - [maxKeys] {String|Number} max objects, default is `100`, limit to `1000` -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return objects list on `objects` properties. 
- -- objects {Array} object meta info list - Each `ObjectMeta` will contains blow properties: - - name {String} object name on oss - - lastModified {String} object last modified GMT date, e.g.: `2015-02-19T08:39:44.000Z` - - etag {String} object etag contains `"`, e.g.: `"5B3C1A2E053D763E1B002CC607C5A0FE"` - - type {String} object type, e.g.: `Normal` - - size {Number} object size, e.g.: `344606` - - isLatest {Boolean} - - versionId {String} object versionId - - storageClass {String} storage class type, e.g.: `Standard` - - owner {Object} object owner, including `id` and `displayName` -- deleteMarker {Array} object delete marker info list - Each `ObjectDeleteMarker` - - name {String} object name on oss - - lastModified {String} object last modified GMT date, e.g.: `2015-02-19T08:39:44.000Z` - - versionId {String} object versionId -- isTruncated {Boolean} truncate or not -- nextKeyMarker (nextMarker) {String} next marker string -- nextVersionIdMarker (NextVersionIdMarker) {String} next version ID marker string -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -example: - -- View all versions of objects and deleteMarker of bucket - -```js -const result = await store.getBucketVersions(); -console.log(result.objects); -console.log(result.deleteMarker); -``` - -- List from key-marker - -```js -const result = await store.getBucketVersions({ - 'keyMarker': 'keyMarker' -}); -console.log(result.objects); -``` - -- List from the version-id-marker of key-marker - -```js -const result = await store.getBucketVersions({ - 'versionIdMarker': 'versionIdMarker', - 'keyMarker': 'keyMarker' -}); -console.log(result.objects); -console.log(result.deleteMarker); -``` - -### .signatureUrl(name[, options]) - -Create a signature url for download or upload object. 
When you put object with signatureUrl, you need to pass `Content-Type`. Please look at the example.
-
-parameters:
-
-- name {String} object name store on OSS
-- [options] {Object} optional parameters
-  - [expires] {Number} after expires seconds, the url will become invalid, default is `1800`
-  - [method] {String} the HTTP method, default is 'GET'
-  - [Content-Type] {String} set the request content type
-  - [process] {String} image process params, will send with `x-oss-process`
-    e.g.: `{process: 'image/resize,w_200'}`
-  - [trafficLimit] {Number} traffic limit, range: `819200`~`838860800`.
-  - [subResource] {Object} additional signature parameters in url.
-  - [response] {Object} set the response headers for download
-    - [content-type] {String} set the response content type
-    - [content-disposition] {String} set the response content disposition
-    - [cache-control] {String} set the response cache control
-    - See more:
-  - [callback] {Object} set the callback for the operation
-    - url {String} set the url for callback
-    - [host] {String} set the host for callback
-    - body {String} set the body for callback
-    - [contentType] {String} set the type for body
-    - [customValue] {Object} set the custom value for callback,eg. {var1: value1,var2:value2}
-
-Success will return signature url.
- -example: - -- Get signature url for object - -```js -const url = store.signatureUrl('ossdemo.txt'); -console.log(url); -// -------------------------------------------------- -const url = store.signatureUrl('ossdemo.txt', { - expires: 3600, - method: 'PUT' -}); -console.log(url); - -// put object with signatureUrl -// ------------------------------------------------- - -const url = store.signatureUrl('ossdemo.txt', { - expires: 3600, - method: 'PUT', - 'Content-Type': 'text/plain; charset=UTF-8', -}); -console.log(url); - -// -------------------------------------------------- -const url = store.signatureUrl('ossdemo.txt', { - expires: 3600, - response: { - 'content-type': 'text/custom', - 'content-disposition': 'attachment' - } -}); -console.log(url); - -// put operation -``` - -- Get a signature url for a processed image - -```js -const url = store.signatureUrl('ossdemo.png', { - process: 'image/resize,w_200' -}); -console.log(url); -// -------------------------------------------------- -const url = store.signatureUrl('ossdemo.png', { - expires: 3600, - process: 'image/resize,w_200' -}); -console.log(url); -``` - -### .asyncSignatureUrl(name[, options]) - -Basically the same as signatureUrl, if refreshSTSToken is configured asyncSignatureUrl will refresh stsToken - -parameters: - -- name {String} object name store on OSS -- [options] {Object} optional parameters - - [expires] {Number} after expires seconds, the url will become invalid, default is `1800` - - [method] {String} the HTTP method, default is 'GET' - - [Content-Type] {String} set the request content type - - [process] {String} image process params, will send with `x-oss-process` - e.g.: `{process: 'image/resize,w_200'}` - - [trafficLimit] {Number} traffic limit, range: `819200`~`838860800`. - - [subResource] {Object} additional signature parameters in url. 
- - [response] {Object} set the response headers for download - - [content-type] {String} set the response content type - - [content-disposition] {String} set the response content disposition - - [cache-control] {String} set the response cache control - - See more: - - [callback] {Object} set the callback for the operation - - url {String} set the url for callback - - [host] {String} set the host for callback - - body {String} set the body for callback - - [contentType] {String} set the type for body - - [customValue] {Object} set the custom value for callback,eg. {var1: value1,var2:value2} - -Success will return signature url. - -example: - -- Get signature url for object - -```js -const url = await store.asyncSignatureUrl('ossdemo.txt'); -console.log(url); -// -------------------------------------------------- -const url = await store.asyncSignatureUrl('ossdemo.txt', { - expires: 3600, - method: 'PUT' -}); -console.log(url); -// put object with signatureUrl -// ------------------------------------------------- -const url = await store.asyncSignatureUrl('ossdemo.txt', { - expires: 3600, - method: 'PUT', - 'Content-Type': 'text/plain; charset=UTF-8', -}); -console.log(url); -// -------------------------------------------------- -const url = await store.asyncSignatureUrl('ossdemo.txt', { - expires: 3600, - response: { - 'content-type': 'text/custom', - 'content-disposition': 'attachment' - } -}); -console.log(url); -// put operation -``` - -- Get a signature url for a processed image - -```js -const url = await store.asyncSignatureUrl('ossdemo.png', { - process: 'image/resize,w_200' -}); -console.log(url); -// -------------------------------------------------- -const url = await store.asyncSignatureUrl('ossdemo.png', { - expires: 3600, - process: 'image/resize,w_200' -}); -console.log(url); -``` - -### .putACL(name, acl[, options]) - -Set object's ACL. 
- -parameters: - -- name {String} object name -- acl {String} acl (private/public-read/public-read-write) -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [versionId] {String} the version id of history object - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -example: - -- Set an object's ACL - -```js -await store.putACL('ossdemo.txt', 'public-read'); -``` - -- Set an history object's ACL - -```js -const versionId = 'object versionId' -await store.putACL('ossdemo.txt', 'public-read', { - versionId -}); -``` - -### .getACL(name[, options]) - -Get object's ACL. - -parameters: - -- name {String} object name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [versionId] {String} the version id of history object - -Success will return: - -- acl {String} acl settiongs string -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -example: - -- Get an object's ACL - -```js -const result = await store.getACL('ossdemo.txt'); -console.log(result.acl); -``` - -- Get an history object's ACL - -```js -const versionId = 'object versionId' -const result = await store.getACL('ossdemo.txt', { versionId }); -console.log(result.acl); -``` - -### .restore(name[, options]) - -Restore Object. 
- -parameters: - -- name {String} object name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [versionId] {String} the version id of history object - - [type] {String} the default type is Archive - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -example: - -- Restore an object with Archive type - -```js -const result = await store.restore('ossdemo.txt'); -console.log(result.status); -``` - -- Restore an object with ColdArchive type - -```js -const result = await store.restore('ossdemo.txt',{type:'ColdArchive'}); -console.log(result.status); -``` - -- Days for unfreezing Specifies the days for unfreezing - -```js -const result = await store.restore('ossdemo.txt',{type:'ColdArchive',Days:2}); -console.log(result.status); -``` - -- Restore an history object - -```js -const versionId = 'object versionId'; -const result = await store.restore('ossdemo.txt', { versionId }); -console.log(result.status); -``` - -### .putSymlink(name, targetName[, options]) - -PutSymlink - -parameters: - -- name {String} object name -- targetName {String} target object name -- [options] {Object} optional parameters - - [storageClass] {String} the storage type include (Standard,IA,Archive) - - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string - - [headers] {Object} extra headers, detail see [PutSymlink](https://help.aliyun.com/document_detail/45126.html#title-x71-l2b-7i8) - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -example: - -```js -const options = { - storageClass: 'IA', - meta: { - uid: '1', - slus: 'test.html' - } -} -const result = await store.putSymlink('ossdemo.txt', 'targetName', options) 
-console.log(result.res) -``` - -putSymlink multiversion - -```js -const options = { - storageClass: 'IA', - meta: { - uid: '1', - slus: 'test.html' - }, -} -const result = await store.putSymlink('ossdemo.txt', 'targetName', options) -console.log(result.res.headers['x-oss-version-id']) -``` - -### .getSymlink(name[, options]) - -GetSymlink - -parameters: - -- name {String} object name -- [options] {Object} optional parameters -- [versionId] {String} the version id of history object - -Success will return - -- targetName {String} target object name -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -example: - -```js -const result = await store.getSymlink('ossdemo.txt') -console.log(result.targetName) -``` - -for history object - -```js -const versionId = 'object versionId'; -const result = await store.getSymlink('ossdemo.txt', { versionId }) -console.log(result.targetName) -``` - -### .initMultipartUpload(name[, options]) - -Before transmitting data in the Multipart Upload mode, -you must call the Initiate Multipart Upload interface to notify the OSS to initiate a Multipart Upload event. -The Initiate Multipart Upload interface returns a globally unique Upload ID created by the OSS server to identify this Multipart Upload event. 
- -parameters: - -- name {String} object name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [mime] Mime file type e.g.: application/octet-stream - - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string - - [headers] {Object} extra headers - - 'Cache-Control' cache control for download, e.g.: `Cache-Control: public, no-cache` - - 'Content-Disposition' object name for download, e.g.: `Content-Disposition: somename` - - 'Content-Encoding' object content encoding for download, e.g.: `Content-Encoding: gzip` - - 'Expires' expires time for download, an absolute date and time. e.g.: `Tue, 08 Dec 2020 13:49:43 GMT` - - [x-oss-server-side-encryption] - Specify the server-side encryption algorithm used to upload each part of this object,Type: string, Valid value: AES256 `x-oss-server-side-encryption: AES256` - - See more: [InitiateMultipartUpload](https://help.aliyun.com/document_detail/31992.html?#title-wh0-a2h-rur) - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - [x-oss-server-side-encryption] if set request header x-oss-server-side-encryption, will return - - size {Number} response size - - rt {Number} request total use time (ms) -- bucket {String} bucket name -- name {String} object name store on OSS -- uploadId {String} upload id, use for uploadPart, completeMultipart - -example: - -```js - const result = await store.initMultipartUpload('object'); - console.log(result); -``` - -### .uploadPart(name, uploadId, partNo, file, start, end[, options]) - -After initiating a Multipart Upload event, you can upload data in parts based on the specified object name and Upload ID. - -parameters: - -- name {String} object name -- uploadId {String} get by initMultipartUpload api -- partNo {Number} range is 1-10000, If this range is exceeded, OSS returns the InvalidArgument's error code. -- file {String} is FileName, the whole file
- Multipart Upload requires that the size of any Part other than the last Part is greater than 100KB. -- start {Number} part start bytes e.g: 102400 -- end {Number} part end bytes e.g: 204800 -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- name {String} object name store on OSS -- etag {String} object etag contains ", e.g.: "5B3C1A2E053D763E1B002CC607C5A0FE" - -example: - -```js - const name = 'object'; - const result = await store.initMultipartUpload(name); - const uploadId = result.uploadId; - const file; //the data you want to upload, is a File or FileName(only in node) - //if file part is 10 - const partSize = 100 * 1024; - const fileSize = 10 * partSize;//you need to calculate - const dones = []; - for (let i = 1; i <= 10; i++) { - const start = partSize * (i -1); - const end = Math.min(start + partSize, fileSize); - const part = await store.uploadPart(name, uploadId, i, file, start, end); - dones.push({ - number: i, - etag: part.etag - }); - console.log(part); - } - - //end need to call completeMultipartUpload api -``` - -### .uploadPartCopy(name, uploadId, partNo, range, sourceData[, options]) - -Using Upload Part Copy, you can copy data from an existing object and upload a part of the data. -When copying a file larger than 1 GB, you must use the Upload Part Copy method. If you want to copy a file smaller than 1 GB, see Copy Object. - -parameters: - -- name {String} object name -- uploadId {String} get by initMultipartUpload api -- partNo {Number} range is 1-10000, If this range is exceeded, OSS returns the InvalidArgument's error code. 
-- range {String} Multipart Upload requires that the size of any Part other than the last Part is greater than 100KB, range value like `0-102400` -- sourceData {Object} - - sourceKey {String} the source object name - - sourceBucketName {String} the source bucket name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [versionId] {String} the version id of history object - - [headers] {Object} The following request header is used for the source objects specified by x-oss-copy-source. - - [x-oss-copy-source-if-match] default none
- If the ETAG value of the source object is equal to the ETAG value provided by the user, the system performs the Copy Object operation; otherwise, the system returns the 412 Precondition Failed message. - - [x-oss-copy-source-if-none-match] default none
- If the ETAG value of the source object is not equal to the ETAG value provided by the user, the system performs the Copy Object operation; otherwise, the system returns the 412 Precondition Failed message.
-    - [x-oss-copy-source-if-unmodified-since] default none
- If the time specified by the received parameter is the same as or later than the modification time of the file, the system transfers the file normally, and returns 200 OK; otherwise, the system returns 412 Precondition Failed. - - [x-oss-copy-source-if-modified-since] default none
- If the source object has been modified since the time specified by the user, the system performs the Copy Object operation; otherwise, the system returns the 412 Precondition Failed message. - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- name {String} object name store on OSS -- etag {String} object etag contains ", e.g.: "5B3C1A2E053D763E1B002CC607C5A0FE" - -example: - -```js - const name = 'object'; - const result = await store.initMultipartUpload(name); - - const partSize = 100 * 1024;//100kb - //if file part is 10 - for (let i = 1; i <= 10; i++) { - const start = partSize * (i -1); - const end = Math.min(start + partSize, fileSize); - const range = start + '-' + (end - 1); - const part = await store.uploadPartCopy(name, result.uploadId, i, range, { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' - }); - console.log(part); - } - - //end need complete api -``` - -- use history object to uploadPartCopy - -```js - const versionId = 'object versionId'; - const name = 'object'; - const result = await store.initMultipartUpload(name); - const partSize = 100 * 1024;//100kb - //if file part is 10 - for (let i = 1; i <= 10; i++) { - const start = partSize * (i -1); - const end = Math.min(start + partSize, fileSize); - const range = start + '-' + (end - 1); - const part = await store.uploadPartCopy(name, result.uploadId, i, range, { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' - }, { - versionId - }); - console.log(part); - } - - //end need complete api -``` - -### .completeMultipartUpload(name, uploadId, parts[, options]) - -After uploading all data parts, you must call the Complete Multipart Upload API to complete Multipart Upload for the entire file. 
- -parameters: - -- name {String} object name -- uploadId {String} get by initMultipartUpload api -- parts {Array} more part {Object} from uploadPartCopy, , each in the structure: - - number {Number} partNo - - etag {String} object etag contains ", e.g.: "5B3C1A2E053D763E1B002CC607C5A0FE" -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [callback] {Object} The callback parameter is composed of a JSON string encoded in Base64,detail [see](https://www.alibabacloud.com/help/doc-detail/31989.htm)
- - url {String} After a file is uploaded successfully, the OSS sends a callback request to this URL. - - [host] {String} The host header value for initiating callback requests. - - body {String} The value of the request body when a callback is initiated, for example, key=${key}&etag=${etag}&my_var=${x:my_var}. - - [contentType] {String} The Content-Type of the callback requests initiatiated, It supports application/x-www-form-urlencoded and application/json, and the former is the default value. - - [customValue] {Object} Custom parameters are a map of key-values
- e.g.: - - ```js - var customValue = {var1: 'value1', var2: 'value2'} - ``` - - - [headers] {Object} extra headers, detail see [CompleteMultipartUpload](https://help.aliyun.com/document_detail/31995.html?#title-nan-5y3-rjd) - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- bucket {String} bucket name -- name {String} object name store on OSS -- etag {String} object etag contains ", e.g.: "5B3C1A2E053D763E1B002CC607C5A0FE" -- data {Object} callback server response data , sdk use JSON.parse() return - -example: - -```js - - //init multipart - const name = 'object'; - const result = await store.initMultipartUpload(name); - - //upload part - const file; //the data you want to upload, this example size is 10 * 100 * 1024 - const fileSize;//you need to calculate - const partSize = 100 * 1024;//100kb - const done = []; - //if file part is 10 - for (let i = 1; i <= 10; i++) { - const start = partSize * (i -1); - const end = Math.min(start + partSize, fileSize); - const data = file.slice(start, end); - const part = store.uploadPart(name, result.uploadId, i, data, 0, data.length); - console.log(part); - done.push({ - number: i, - etag: part.res.headers.etag - }); - } - - //complete - const completeData = await store.completeMultipartUpload(name, result.uploadId, done); - console.log(completeData); -``` - -### .multipartUpload(name, file[, options]) - -Upload file with [OSS multipart][oss-multipart].
-this function contains initMultipartUpload, uploadPart, completeMultipartUpload. -When you use multipartUpload api,if you encounter problems with ConnectionTimeoutError, you should handle ConnectionTimeoutError in your business code. How to resolve ConnectionTimeoutError, you can decrease `partSize` size 、 Increase `timeout` 、Retry request , -or give tips in your business code; - -parameters: - -- name {String} object name -- file {String)|Buffer} file path or content buffer -- [options] {Object} optional args - - [parallel] {Number} the number of parts to be uploaded in parallel - - [partSize] {Number} the suggested size for each part, defalut `1024 * 1024`(1MB), minimum `100 * 1024`(100KB) - - [progress] {Function} function | async | Promise, the progress callback called after each - successful upload of one part, it will be given three parameters: - (percentage {Number}, checkpoint {Object}, res {Object}) - - [checkpoint] {Object} the checkpoint to resume upload, if this is - provided, it will continue the upload from where interrupted, - otherwise a new multipart upload will be created. - - file {File} The file object selected by the user, if the browser is restarted, it needs the user to manually trigger the settings - - name {String} object key - - fileSize {Number} file size - - partSize {Number} part size - - uploadId {String} upload id - - doneParts {Array} An array of pieces that have been completed, including the object structure as follows - - number {Number} part number - - etag {String} part etag - - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string - - [mime] {String} custom mime , will send with `Content-Type` entity header - - [callback] {Object} The callback parameter is composed of a JSON string encoded in Base64,detail [see](https://www.alibabacloud.com/help/doc-detail/31989.htm)
- - url {String} After a file is uploaded successfully, the OSS sends a callback request to this URL. - - [host] {String} The host header value for initiating callback requests. - - body {String} The value of the request body when a callback is initiated, for example, key=${key}&etag=${etag}&my_var=${x:my_var}. - - [contentType] {String} The Content-Type of the callback requests initiatiated, It supports application/x-www-form-urlencoded and application/json, and the former is the default value. - - [customValue] {Object} Custom parameters are a map of key-values
- e.g.: - - ```js - var customValue = {var1: 'value1', var2: 'value2'} - ``` - - - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) - - 'Cache-Control' cache control for download, e.g.: `Cache-Control: public, no-cache` - - 'Content-Disposition' object name for download, e.g.: `Content-Disposition: somename` - - 'Content-Encoding' object content encoding for download, e.g.: `Content-Encoding: gzip` - - 'Expires' expires time for download, an absolute date and time. e.g.: `Tue, 08 Dec 2020 13:49:43 GMT` - - [timeout] {Number} Milliseconds before a request is considered to be timed out - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- bucket {String} bucket name -- name name {String} object name store on OSS -- etag {String} object etag contains ", e.g.: "5B3C1A2E053D763E1B002CC607C5A0FE" -- data {Object} callback server response data, sdk use JSON.parse() return - -example: - -- Upload using multipart - -```js -const result = await store.multipartUpload('object', '/tmp/file'); -let savedCpt; -console.log(result); - -const result = await store.multipartUpload('object', '/tmp/file', { - parallel: 4, - partSize: 1024 * 1024, - progress: function (p, cpt, res) { - console.log(p); - savedCpt = cpt; - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } -}); - -const result = await store.multipartUpload('object', '/tmp/file', { - checkpoint: savedCpt, - progress: function (p, cpt, res) { //progress is generator - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } -}); - -``` - -- multipartUpload progress example - -```js - -//async function -async function asyncProgress(p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); -} - -const result1 = await 
store.multipartUpload('object', '/tmp/file', { - progress: asyncProgress -}); - -//function -function progress(p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); -} - -const result2 = await store.multipartUpload('object', '/tmp/file', { - progress: progress -}); - -``` - -- multipartUpload with abort - -```js - -//start upload -let abortCheckpoint; -store.multipartUpload('object', '/tmp/file', { - progress: function (p, cpt, res) { - abortCheckpoint = cpt; - } -}).then(res => { - // do something -}.catch(err => { - //if abort will catch abort event - if (err.name === 'abort') { - // handle abort - console.log('error: ', err.message) - } -})) - -// abort -store.abortMultipartUpload(abortCheckpoint.name, abortCheckpoint.uploadId) - -``` - -- multipartUpload with cancel - -```js - -//start upload -try { - const result = await store.multipartUpload('object', '/tmp/file', { - checkpoint: savedCpt, - progress: function (p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } - }); -} catch (err) { - //if cancel will catch cancel event - if (store.isCancel()) { - //do something - } -} - -//the other event to cancel, for example: click event -//to cancel upload must use the same client instance -store.cancel(); - -``` - -- multipartUpload with capture `ConnectionTimeoutError` error - -```js - -//start upload -try { - const result = await store.multipartUpload('object', '/tmp/file', { - checkpoint: savedCpt, - progress: function (p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } - }); -} catch (err) { - if (err.code === 'ConnectionTimeoutError') { - console.log("Woops,Woops ,timeout error!!!"); - // do ConnectionTimeoutError operation - } -} - -``` - -### .multipartUploadCopy(name, sourceData[, options]) - -Copy file with [OSS multipart][oss-multipart].
-this function contains head, initMultipartUpload, uploadPartCopy, completeMultipartUpload.
-When copying a file larger than 1 GB, you should use the Upload Part Copy method. If you want to copy a file smaller than 1 GB, see Copy Object. - -parameters: - -- name {String} object name -- file {String|File} file path or HTML5 Web File -- [options] {Object} optional args - - [timeout] {Number} Milliseconds before a request is considered to be timed out - - [parallel] {Number} the number of parts to be uploaded in parallel - - [partSize] {Number} the suggested size for each part, defalut `1024 * 1024`(1MB), minimum `100 * 1024`(100KB) - - [versionId] {String} the version id of history object - - [progress] {Function} function | async | Promise, the progress callback called after each - successful upload of one part, it will be given three parameters: - (percentage {Number}, checkpoint {Object}, res {Object}) - - [checkpoint] {Object} the checkpoint to resume upload, if this is - provided, it will continue the upload from where interrupted, - otherwise a new multipart upload will be created. - - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) - - 'Cache-Control' cache control for download, e.g.: `Cache-Control: public, no-cache` - - 'Content-Disposition' object name for download, e.g.: `Content-Disposition: somename` - - 'Content-Encoding' object content encoding for download, e.g.: `Content-Encoding: gzip` - - 'Expires' expires time for download, an absolute date and time. e.g.: `Tue, 08 Dec 2020 13:49:43 GMT` - - [copyheaders] {Object} only uploadPartCopy api used, detail [see](https://www.alibabacloud.com/help/doc-detail/31994.htm) - - [x-oss-copy-source-if-match] only uploadPartCopy api used, default none
- If the ETAG value of the source object is equal to the ETAG value provided by the user, the system performs the Copy Object operation; otherwise, the system returns the 412 Precondition Failed message. - - [x-oss-copy-source-if-none-match] only uploadPartCopy api used, default none
- If the source object has not been modified since the time specified by the user, the system performs the Copy Object operation; otherwise, the system returns the 412 Precondition Failed message. - - [x-oss-copy-source-if-unmodified-since] only uploadPartCopy api used, default none
- If the time specified by the received parameter is the same as or later than the modification time of the file, the system transfers the file normally, and returns 200 OK; otherwise, the system returns 412 Precondition Failed. - - [x-oss-copy-source-if-modified-since] only uploadPartCopy api used, default none
- If the source object has been modified since the time specified by the user, the system performs the Copy Object operation; otherwise, the system returns the 412 Precondition Failed message. - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- bucket {String} bucket name -- name name {String} object name store on OSS -- etag {String} object etag contains ", e.g.: "5B3C1A2E053D763E1B002CC607C5A0FE" - -example: - -- Copy using multipart - -```js -const result = await store.multipartUploadCopy('object', { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' -}); -let savedCpt; -console.log(result); - -const result = await store.multipartUploadCopy('object', { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' -}, { - parallel: 4, - partSize: 1024 * 1024, - progress: function (p, cpt, res) { - console.log(p); - savedCpt = cpt; - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } -}); - -console.log(result); - -const result = await store.multipartUploadCopy('object', { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' -}, { - checkpoint: savedCpt, - progress: function (p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } -}); - -console.log(result); - -``` - -- multipartUploadCopy with abort - -```js - -//start upload -let abortCheckpoint; -store.multipartUploadCopy('object', { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' - }, { - progress: function (p, cpt, res) { - abortCheckpoint = cpt; - } -}).then(res => { - // do something -}.catch(err => { - //if abort will catch abort event - if (err.name === 'abort') { - // handle abort - console.log('error: ', err.message) - } -})) - -//the other event to abort, for example: click event -//to abort upload must use the same client 
instance -store.abortMultipartUpload(abortCheckpoint.name, abortCheckpoint.uploadId) - -``` - -- multipartUploadCopy with cancel - -```js - -//start upload -try { - const result = await store.multipartUploadCopy('object', { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' - }, { - checkpoint: savedCpt, - progress: function (p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } - }); -} catch (err) { - //if cancel will catch cancel event - if (store.isCancel()) { - //do something - } -} - -//the other event to cancel, for example: click event -//to cancel upload must use the same client instance -store.cancel(); - -``` - -- multipartUploadCopy with versionId - -```js - -const versionId = 'object versionId' -//start upload -const result = await store.multipartUploadCopy('object', { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' -}, { - checkpoint: savedCpt, - progress: function (p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - }, - versionId -}); - -``` - -### .listParts(name, uploadId[, query, options]) - -The ListParts command can be used to list all successfully uploaded parts mapped to a specific upload ID, i.e.: those not completed and not -aborted. - -parameters: - -- name {String} object key -- uploadId {String} upload ID from initMultipartUpload api -- [query] {Object} query parameters - - [max-parts] {Number} The maximum part number in the response of the OSS. default value: 1000. - - [part-number-marker] {Number} Starting position of a specific list. A part is listed only when the part number is greater than the value of this parameter. - - [encoding-type] {String} Specify the encoding of the returned content and the encoding type. 
Optional value: url -- [options] {Object} optional args - - [timeout] {Number} the operation timeout - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- uploadId {String} upload ID -- bucket {String} Specify the bucket name. -- name {String} object name -- PartNumberMarker {Number} Starting position of the part numbers in the listing result. -- nextPartNumberMarker {Number} If not all results are returned this time, the response request includes the NextPartNumberMarker element to indicate the value of PartNumberMarker in the next request. -- maxParts {Number} upload ID -- isTruncated {Boolean} Whether the returned result list for List Parts is truncated. The “true” indicates that not all results are returned; “false” indicates that all results are returned. -- parts {Array} The container that saves part information, each in the structure: - - PartNumber {Number} Part number. - - LastModified {Date} Time when a part is uploaded. - - ETag {String} ETag value in the content of the uploaded part. - - Size {Number} Size of the uploaded part. - -example: - -- List uploaded part - -```js - -const result = await store.listParts('objcet', 'uploadId', { - 'max-parts': 1000 -}); -console.log(result); -``` - -### .listUploads(query[, options]) - -List on-going multipart uploads, i.e.: those not completed and not -aborted. 
- -parameters: - -- query {Object} query parameters - - [prefix] {String} the object key prefix - - [max-uploads] {Number} the max uploads to return - - [key-marker] {String} the object key marker, if `upload-id-marker` - is not provided, return uploads with `key > marker`, otherwise - return uploads with `key >= marker && uploadId > id-marker` - - [upload-id-marker] {String} the upload id marker, must be used - **WITH** `key-marker` -- [options] {Object} optional args - - [timeout] {Number} the operation timeout - -example: - -- List on-going multipart uploads - -```js - -const result = await store.listUploads({ - 'max-uploads': 100, - 'key-marker': 'my-object', - 'upload-id-marker': 'upload-id' -}); -console.log(result); -``` - -### .abortMultipartUpload(name, uploadId[, options]) - -Abort a multipart upload for object. - -parameters: - -- name {String} the object name -- uploadId {String} the upload id -- [options] {Object} optional args - - [timeout] {Number} the operation timeout - -example: - -- Abort a multipart upload - -```js -const result = await store.abortMultipartUpload('object', 'upload-id'); -console.log(result); -``` - -### .calculatePostSignature(policy) - -get postObject params - -parameters: - -- policy {JSON or Object} policy must contain expiration and conditions. - -Success will return postObject Api params. - -Object: - -- OSSAccessKeyId {String} -- Signature {String} -- policy {Object} response info - -### .getObjectTagging(name[, options]) - -Obtains the tags of an object. - -parameters: - -- name {String} the object name -- [options] {Object} optional args - - [versionId] {String} the version id of history object - -Success will return the channel information. - -object: - -- tag {Object} the tag of object -- res {Object} response info - -### .putObjectTagging(name, tag[, options]) - -Configures or updates the tags of an object. - -parameters: - -- name {String} the object name -- tag {Object} tag, eg. 
`{var1: value1,var2:value2}` -- [options] {Object} optional args - - [versionId] {String} the version id of history object - -Success will return the channel information. - -object: - -- status {Number} response status -- res {Object} response info - -### .deleteObjectTagging(name[, options]) - -Deletes the tag of a specified object. - -parameters: - -- name {String} the object name -- tag {Object} tag, eg. `{var1: value1,var2:value2}` -- [options] {Object} optional args - - [versionId] {String} the version id of history object - -Success will return the channel information. - -object: - -- status {Number} response status -- res {Object} response info - -### .processObjectSave(sourceObject, targetObject, process[, targetBucket]) - -Persistency indicates that images are asynchronously stored in the specified Bucket - -parameters: - -- sourceObject {String} source object name -- targetObject {String} target object name -- process {String} process string -- [targetBucket] {String} target bucket - -Success will return the channel information. - -object: - -- status {Number} response status -- res {Object} response info - -```js -const sourceObject = 'a.png' -const targetObject = 'b.png' -const process = 'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00' - -await this.store.processObjectSave(sourceObject, targetObject, process); -``` - -## RTMP Operations - -All operations function is [async], except `getRtmpUrl`. - -async function format: `async functionName(...)`. - -### .putChannel(id, conf[, options]) - -Create a live channel. 
- -parameters: - -- id {String} the channel id -- conf {Object} the channel config - - [Description] {String} the channel description - - [Status] {String} the channel status: 'enabled' or 'disabled' - - [Target] {Object} - - [Type] {String} the data type for the channel, only 'HLS' is supported now - - [FragDuration] {Number} duration of a 'ts' segment - - [FragCount] {Number} the number of 'ts' segments in a 'm3u8' - - [PlaylistName] {String} the 'm3u8' name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the channel information. - -object: - -- publishUrls {Array} the publish urls -- playUrls {Array} the play urls -- res {Object} response info - -example: - -- Create a live channel - -```js -const cid = 'my-channel'; -const conf = { - Description: 'this is channel 1', - Status: 'enabled', - Target: { - Type: 'HLS', - FragDuration: '10', - FragCount: '5', - PlaylistName: 'playlist.m3u8' - } -}; - -const r = await this.store.putChannel(cid, conf); -console.log(r); -``` - -### .getChannel(id[, options]) - -Get live channel info. - -parameters: - -- id {String} the channel id -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the channel information. - -object: - -- data {Object} channel info, same as conf in [.putChannel](#putchannelid-conf-options) -- res {Object} response info - -example: - -- Get live channel info - -```js -const cid = 'my-channel'; - -const r = await this.store.getChannel(cid); -console.log(r); -``` - -### .deleteChannel(id[, options]) - -Delete a live channel. - -parameters: - -- id {String} the channel id -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the response infomation. 
- -object: - -- res {Object} response info - -example: - -- Delete a live channel - -```js -const cid = 'my-channel'; - -const r = await this.store.deleteChannel(cid); -console.log(r); -``` - -### .putChannelStatus(id, status[, options]) - -Change the live channel status. - -parameters: - -- id {String} the channel id -- status {String} the status: 'enabled' or 'disabled' -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the response information. - -object: - -- res {Object} response info - -example: - -- Disable a live channel - -```js -const cid = 'my-channel'; - -const r = await this.store.putChannelStatus(cid, 'disabled'); -console.log(r); -``` - -### .getChannelStatus(id[, options]) - -Get the live channel status. - -parameters: - -- id {String} the channel id -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the channel status information. - -object: - -- data {Object} - - Status {String} the channel status: 'Live' or 'Idle' - - [ConnectedTime] {String} the connected time of rtmp pushing - - [RemoteAddr] {String} the remote addr of rtmp pushing - - [Video] {Object} the video parameters (Width/Height/FrameRate/Bandwidth/Codec) - - [Audio] {Object} the audio parameters (Bandwidth/SampleRate/Codec) -- res {Object} response info - -example: - -- Get a live channel status - -```js -const cid = 'my-channel'; - -const r = await this.store.getChannelStatus(cid); -console.log(r); - -// { Status: 'Live', -// ConnectedTime: '2016-04-12T11:51:03.000Z', -// RemoteAddr: '42.120.74.98:53931', -// Video: -// { Width: '672', -// Height: '378', -// FrameRate: '29', -// Bandwidth: '60951', -// Codec: 'H264' }, -// Audio: { Bandwidth: '5959', SampleRate: '22050', Codec: 'AAC' } -// } -``` - -### .listChannels(query[, options]) - -List channels. 
- -parameters: - -- query {Object} parameters for list - - prefix {String}: the channel id prefix (returns channels with this prefix) - - marker {String}: the channle id marker (returns channels after this id) - - max-keys {Number}: max number of channels to return -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the channel list. - -object: - -- channels {Array} the channels, each in the structure: - - Name {String} the channel id - - Description {String} the channel description - - Status {String} the channel status - - LastModified {String} the last modification time of the channel - - PublishUrls {Array} the publish urls for the channel - - PlayUrls {Array} the play urls for the channel -- nextMarker: result.data.NextMarker || null, -- isTruncated: result.data.IsTruncated === 'true' -- res {Object} response info +Success will return signature url. example: -- List live channels +- Get signature url for object ```js -const r = await this.store.listChannels({ - prefix: 'my-channel', - 'max-keys': 3 +const url = store.signatureUrl('ossdemo.txt'); +console.log(url); +// -------------------------------------------------- +const url = store.signatureUrl('ossdemo.txt', { + expires: 3600, + method: 'PUT' }); -console.log(r); -``` - -### .getChannelHistory(id[, options]) - -Get the live channel history. - -parameters: - -- id {String} the channel id -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the history information. 
- -object: - -- records {Object} the pushing records, each in the structure: - - StartTime {String} the start time - - EndTime {String} the end time - - RemoteAddr {String} the remote addr -- res {Object} response info +console.log(url); -example: +// put object with signatureUrl +// ------------------------------------------------- -- Get the live channel history +const url = store.signatureUrl('ossdemo.txt', { + expires: 3600, + method: 'PUT', + 'Content-Type': 'text/plain; charset=UTF-8', +}); +console.log(url); -```js -const cid = 'my-channel'; +// -------------------------------------------------- +const url = store.signatureUrl('ossdemo.txt', { + expires: 3600, + response: { + 'content-type': 'text/custom', + 'content-disposition': 'attachment' + } +}); +console.log(url); -const r = await this.store.getChannelHistory(cid); -console.log(r); +// put operation ``` -### .createVod(id, name, time[, options]) - -Create a VOD playlist for the channel. - -parameters: - -- id {String} the channel id -- name {String} the playlist name -- time {Object} the duration time - - startTime {Number} the start time in epoch seconds - - endTime {Number} the end time in epoch seconds -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the response information. 
- -object: - -- res {Object} response info - -example: - -- Create a vod playlist of a live channel +- Get a signature url for a processed image ```js -const cid = 'my-channel'; - -const r = await this.store.createVod(cid, 're-play', { - startTime: 1460464870, - endTime: 1460465877 +const url = store.signatureUrl('ossdemo.png', { + process: 'image/resize,w_200' +}); +console.log(url); +// -------------------------------------------------- +const url = store.signatureUrl('ossdemo.png', { + expires: 3600, + process: 'image/resize,w_200' }); -console.log(r); +console.log(url); ``` -### .getRtmpUrl(channelId[, options]) +### .asyncSignatureUrl(name[, options]) -Get signatured rtmp url for publishing. +Basically the same as signatureUrl, if refreshSTSToken is configured asyncSignatureUrl will refresh stsToken parameters: -- channelId {String} the channel id +- name {String} object name store on OSS - [options] {Object} optional parameters - - [expires] {Number} the expire time in seconds of the url - - [params] {Object} the additional paramters for url, e.g.: {playlistName: 'play.m3u8'} - - [timeout] {Number} the operation timeout + - [expires] {Number} after expires seconds, the url will become invalid, default is `1800` + - [method] {String} the HTTP method, default is 'GET' + - [Content-Type] {String} set the request content type + - [process] {String} image process params, will send with `x-oss-process` + e.g.: `{process: 'image/resize,w_200'}` + - [trafficLimit] {Number} traffic limit, range: `819200`~`838860800`. + - [subResource] {Object} additional signature parameters in url. 
+ - [response] {Object} set the response headers for download + - [content-type] {String} set the response content type + - [content-disposition] {String} set the response content disposition + - [cache-control] {String} set the response cache control + - See more: + - [callback] {Object} set the callback for the operation + - url {String} set the url for callback + - [host] {String} set the host for callback + - body {String} set the body for callback + - [contentType] {String} set the type for body + - [customValue] {Object} set the custom value for callback,eg. {var1: value1,var2:value2} -Success will return the rtmp url. +Success will return signature url. example: -- Get a rtmp url. +- Get signature url for object ```js -const cid = 'my-channel'; - -const url = this.store.getRtmpUrl(this.cid, { - params: { - playlistName: 'play.m3u8' - }, - expires: 3600 +const url = await store.asyncSignatureUrl('ossdemo.txt'); +console.log(url); +// -------------------------------------------------- +const url = await store.asyncSignatureUrl('ossdemo.txt', { + expires: 3600, + method: 'PUT' +}); +console.log(url); +// put object with signatureUrl +// ------------------------------------------------- +const url = await store.asyncSignatureUrl('ossdemo.txt', { + expires: 3600, + method: 'PUT', + 'Content-Type': 'text/plain; charset=UTF-8', +}); +console.log(url); +// -------------------------------------------------- +const url = await store.asyncSignatureUrl('ossdemo.txt', { + expires: 3600, + response: { + 'content-type': 'text/custom', + 'content-disposition': 'attachment' + } }); console.log(url); -// rtmp://ossliveshow.oss-cn-hangzhou.aliyuncs.com/live/tl-channel?OSSAccessKeyId=T0cqQWBk2ThfRS6m&Expires=1460466188&Signature=%2BnzTtpyxUWDuQn924jdS6b51vT8%3D +// put operation ``` -## Create A Image Service Instance - -Each Image Service instance required `accessKeyId`, `accessKeySecret`, `bucket` and `imageHost`. 
- -### oss.ImageClient(options) - -Create a Image service instance. - -options: - -- imageHost {String} your image service domain that binding to a OSS bucket -- accessKeyId {String} access key you create on aliyun console website -- accessKeySecret {String} access secret you create -- bucket {String} the default bucket you want to access - If you don't have any bucket, please use `putBucket()` create one first. -- [region] {String} the bucket data region location, please see [Data Regions](#data-regions), - default is `oss-cn-hangzhou` - Current available: `oss-cn-hangzhou`, `oss-cn-qingdao`, `oss-cn-beijing`, `oss-cn-hongkong` and `oss-cn-shenzhen` -- [internal] {Boolean} access OSS with aliyun internal network or not, default is `false` - If your servers are running on aliyun too, you can set `true` to save lot of money. -- [timeout] {String|Number} instance level timeout for all operations, default is `60s` - -example: +- Get a signature url for a processed image ```js -const { Client } = require('oss-client'); - -const imgClient = oss.ImageClient({ - accessKeyId: 'your access key', - accessKeySecret: 'your access secret', - bucket: 'my_image_bucket' - imageHost: 'thumbnail.myimageservice.com' +const url = await store.asyncSignatureUrl('ossdemo.png', { + process: 'image/resize,w_200' +}); +console.log(url); +// -------------------------------------------------- +const url = await store.asyncSignatureUrl('ossdemo.png', { + expires: 3600, + process: 'image/resize,w_200' }); +console.log(url); ``` -## Image Operations - -All operations function is [async], except `imgClient.signatureUrl`. - -async function format: `async functionName(...)`. - -### imgClient.get(name, file[, options]) +### .putACL(name, acl[, options]) -Get an image from the image channel. +Set object's ACL. 
parameters: -- name {String} image object name with operation style store on OSS -- [file] {String|WriteStream} file path or WriteStream instance to store the image - If `file` is null or ignore this parameter, function will return info contains `content` property. +- name {String} object name +- acl {String} acl (private/public-read/public-read-write) - [options] {Object} optional parameters - [timeout] {Number} the operation timeout - - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) - - 'If-Modified-Since' object modified after this time will return 200 and object meta, - otherwise return 304 not modified - - 'If-Unmodified-Since' object modified before this time will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-Match' object etag equal this will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-None-Match' object etag not equal this will return 200 and object meta, - otherwise return 304 not modified - -Success will return the info contains response. + - [versionId] {String} the version id of history object -object: +Success will return: -- [content] {Buffer} file content buffer if `file` parameter is null or ignore - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt {Number} request total use time (ms) -If object not exists, will throw NoSuchKeyError. 
- example: -- Get an exists image with a style and store it to the local file - -```js -const imagepath = '/home/ossdemo/demo.jpg'; -await imgClient.get('ossdemo/demo.jpg@200w_200h', filepath); -``` - -_ Store image to a writestream - -```js -await imgClient.get('ossdemo/demo.jpg@200w_200h', somestream); -``` - -- Get an image content buffer +- Set an object's ACL ```js -const result = await imgClient.get('ossdemo/demo.jpg@200w_200h'); -console.log(Buffer.isBuffer(result.content)); +await store.putACL('ossdemo.txt', 'public-read'); ``` -- Get a not exists object or a not image object +- Set an history object's ACL ```js -const imagepath = '/home/ossdemo/demo.jpg'; -await imgClient.get('ossdemo/not-exists-demo.jpg@200w_200h', filepath); -// will throw NoSuchKeyError +const versionId = 'object versionId' +await store.putACL('ossdemo.txt', 'public-read', { + versionId +}); ``` -### imgClient.getStream(name[, options]) +### .getACL(name[, options]) -Get an image read stream. +Get object's ACL. parameters: -- name {String} image object name with operation style store on OSS +- name {String} object name - [options] {Object} optional parameters - [timeout] {Number} the operation timeout - - [headers] {Object} extra headers - - 'If-Modified-Since' object modified after this time will return 200 and object meta, - otherwise return 304 not modified - - 'If-Unmodified-Since' object modified before this time will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-Match' object etag equal this will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-None-Match' object etag not equal this will return 200 and object meta, - otherwise return 304 not modified - -Success will return the stream instance and response info. + - [versionId] {String} the version id of history object -object: +Success will return: -- stream {ReadStream} readable stream instance - if response status is not 200, stream will be `null`. 
+- acl {String} acl settiongs string - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt {Number} request total use time (ms) -If object not exists, will throw NoSuchKeyError. - example: -- Get an exists image object stream +- Get an object's ACL + +```js +const result = await store.getACL('ossdemo.txt'); +console.log(result.acl); +``` + +- Get an history object's ACL ```js -const result = await imgClient.getStream('ossdemo/demo.jpg@200w_200h'); -result.stream.pipe(fs.createWriteStream('some demo.jpg')); +const versionId = 'object versionId' +const result = await store.getACL('ossdemo.txt', { versionId }); +console.log(result.acl); ``` -### imgClient.getExif(name[, options]) +### .restore(name[, options]) -Get a image exif info by image object name from the image channel. +Restore Object. parameters: -- name {String} image object name +- name {String} object name - [options] {Object} optional parameters - [timeout] {Number} the operation timeout + - [versionId] {String} the version id of history object + - [type] {String} the default type is Archive -Success will return the info contains response. - -object: +Success will return: - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt {Number} request total use time (ms) -- data {Object} image exif object - -If object don't have exif, will throw 400 BadRequest. 
example: +- Restore an object with Archive type + +```js +const result = await store.restore('ossdemo.txt'); +console.log(result.status); +``` + +- Restore an object with ColdArchive type + +```js +const result = await store.restore('ossdemo.txt',{type:'ColdArchive'}); +console.log(result.status); +``` + +- Specify the number of days for unfreezing + ```js -const result = await imgClient.getExif('demo.jpg'); -// resut: -// { -// res: { -// status: 200, -// statusCode: 200, -// headers: { -// server: "Tengine", -// content - type: "application/json", -// content - length: "148", -// connection: "keep-alive", -// date: "Tue, 31 Mar 2015 11:06:32 GMT", -// "last-modified": "Mon, 30 Mar 2015 10:46:35 GMT" -// }, -// size: 148, -// aborted: false, -// rt: 461, -// keepAliveSocket: false -// }, -// data: { -// FileSize: 343683, -// ImageHeight: 1200, -// ImageWidth: 1600, -// Orientation: 1 -// } -// } +const result = await store.restore('ossdemo.txt',{type:'ColdArchive',Days:2}); +console.log(result.status); +``` + +- Restore a history object +```js +const versionId = 'object versionId'; +const result = await store.restore('ossdemo.txt', { versionId }); +console.log(result.status); ``` -### imgClient.getInfo(name[, options]) +### .putSymlink(name, targetName[, options]) -Get a image info and exif info by image object name from the image channel. +PutSymlink parameters: -- name {String} image object name +- name {String} object name +- targetName {String} target object name - [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the info contains response.
- -object: + - [storageClass] {String} the storage type include (Standard,IA,Archive) + - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string + - [headers] {Object} extra headers, detail see [PutSymlink](https://help.aliyun.com/document_detail/45126.html#title-x71-l2b-7i8) - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt {Number} request total use time (ms) -- data {Object} image exif object example: ```js -const result = await imgClient.getInfo('demo.jpg'); -// resut: -// { -// res: { -// status: 200, -// statusCode: 200, -// headers: { -// server: "Tengine", -// content - type: "application/json", -// content - length: "148", -// connection: "keep-alive", -// date: "Tue, 31 Mar 2015 11:06:32 GMT", -// "last-modified": "Mon, 30 Mar 2015 10:46:35 GMT" -// }, -// size: 148, -// aborted: false, -// rt: 461, -// keepAliveSocket: false -// }, -// data: { -// FileSize: 343683, -// Format: "jpg", -// ImageHeight: 1200, -// ImageWidth: 1600, -// Orientation: 1 -// } -// } - +const options = { + storageClass: 'IA', + meta: { + uid: '1', + slus: 'test.html' + } +} +const result = await store.putSymlink('ossdemo.txt', 'targetName', options) +console.log(result.res) ``` -### imgClient.putStyle(name, style[, options]) +putSymlink multiversion -// TODO +```js +const options = { + storageClass: 'IA', + meta: { + uid: '1', + slus: 'test.html' + }, +} +const result = await store.putSymlink('ossdemo.txt', 'targetName', options) +console.log(result.res.headers['x-oss-version-id']) +``` -### imgClient.getStyle(name[, options]) +### .getSymlink(name[, options]) -Get a style by name from the image channel. +GetSymlink parameters: -- name {String} image style name +- name {String} object name - [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the info contains response. 
+- [versionId] {String} the version id of history object -object: +Success will return +- targetName {String} target object name - res {Object} response info, including - status {Number} response status - headers {Object} response headers - size {Number} response size - rt {Number} request total use time (ms) -- data {Object} styles object - - Name {String} style name - - Content {String} style content - - CreateTime {String} style create time - - LastModifyTime {String} style last modify time example: ```js -const result = await imgClient.getStyle('400'); -// resut: -// { -// res: { -// status: 200, -// statusCode: 200, -// headers: { -// server: "Tengine", -// content - type: "application/xml", -// content - length: "234", -// connection: "keep-alive", -// date: "Tue, 31 Mar 2015 10:58:20 GMT" -// }, -// size: 234, -// aborted: false, -// rt: 398, -// keepAliveSocket: false -// }, -// data: { -// Name: "400", -// Content: "400w_90Q_1x.jpg", -// CreateTime: "Thu, 19 Mar 2015 08:34:21 GMT", -// LastModifyTime: "Thu, 19 Mar 2015 08:34:21 GMT" -// } -// } +const result = await store.getSymlink('ossdemo.txt') +console.log(result.targetName) +``` + +for history object + +```js +const versionId = 'object versionId'; +const result = await store.getSymlink('ossdemo.txt', { versionId }) +console.log(result.targetName) ``` -### imgClient.listStyle([options]) +### .calculatePostSignature(policy) -Get all styles from the image channel. +get postObject params parameters: -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout +- policy {JSON or Object} policy must contain expiration and conditions. -Success will return the info contains response. +Success will return postObject Api params. 
-object: +Object: -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- data {Array} styles array, a style object: - - Name {String} style name - - Content {String} style content - - CreateTime {String} style create time - - LastModifyTime {String} style last modify time +- OSSAccessKeyId {String} +- Signature {String} +- policy {Object} response info -example: +### .getObjectTagging(name[, options]) -```js -const result = await imgClient.listStyle(); -// resut: -// { -// res: { -// status: 200, -// statusCode: 200, -// headers: { -// server: "Tengine", -// content - type: "application/xml", -// content - length: "913", -// connection: "keep-alive", -// date: "Tue, 31 Mar 2015 10:47:32 GMT" -// }, -// size: 913, -// aborted: false, -// rt: 1911, -// keepAliveSocket: false -// }, -// data: [{ -// Name: "200-200", -// Content: "0e_200w_200h_0c_0i_0o_90Q_1x.jpg", -// CreateTime: "Thu, 19 Mar 2015 08:28:08 GMT", -// LastModifyTime: "Thu, 19 Mar 2015 08:28:08 GMT" -// }, { -// Name: "800", -// Content: "800w_90Q_1x.jpg", -// CreateTime: "Thu, 19 Mar 2015 08:29:15 GMT", -// LastModifyTime: "Thu, 19 Mar 2015 08:29:15 GMT" -// }, { -// Name: "400", -// Content: "400w_90Q_1x.jpg", -// CreateTime: "Thu, 19 Mar 2015 08:34:21 GMT", -// LastModifyTime: "Thu, 19 Mar 2015 08:34:21 GMT" -// }, { -// Name: "600", -// Content: "600w_90Q_1x.jpg", -// CreateTime: "Thu, 19 Mar 2015 08:35:02 GMT", -// LastModifyTime: "Thu, 19 Mar 2015 08:35:02 GMT" -// }] -// } -``` +Obtains the tags of an object. + +parameters: + +- name {String} the object name +- [options] {Object} optional args + - [versionId] {String} the version id of history object + +Success will return the channel information. 
-### imgClient.deleteStyle(name[, options]) +object: -// TODO +- tag {Object} the tag of object +- res {Object} response info -### imgClient.signatureUrl(name) +### .putObjectTagging(name, tag[, options]) -Create a signature url for directly download. +Configures or updates the tags of an object. parameters: -- name {String} image object name with operation style store on OSS -- [options] {Object} optional parameters - - [expires] {Number} after expires seconds, the url will become invalid, default is `1800` - - [timeout] {Number} the operation timeout +- name {String} the object name +- tag {Object} tag, eg. `{var1: value1,var2:value2}` +- [options] {Object} optional args + - [versionId] {String} the version id of history object -Success will return full signature url. +Success will return the channel information. -example: +object: -```js -const url = imgClient.signatureUrl(' -'); -// http://thumbnail.myimageservice.com/demo.jpg@200w_200h?OSSAccessKeyId=uZxyLARzYZtGwHKY&Expires=1427803849&Signature=JSPRe06%2FjQpQSj5zlx2ld1V%2B35I%3D -``` +- status {Number} response status +- res {Object} response info -## Cluster Mode +### .deleteObjectTagging(name[, options]) -Cluster mode now only support object operations. +Deletes the tag of a specified object. -```js -const Cluster = require('oss-client').ClusterClient; - -const client = Cluster({ - cluster: [{ - host: 'host1', - accessKeyId: 'id1', - accessKeySecret: 'secret1' - }, { - host: 'host2', - accessKeyId: 'id2', - accessKeySecret: 'secret2' - }], - schedule: 'masterSlave', //default is `roundRobin` -}); +parameters: -// listen error event to logging error -client.on('error', function(err) { - console.error(err.stack); -}); +- name {String} the object name +- tag {Object} tag, eg. 
`{var1: value1,var2:value2}` +- [options] {Object} optional args + - [versionId] {String} the version id of history object -// client init ready -client.ready(function() { - console.log('cluster client init ready, go ahead!'); -}); -``` +Success will return the response information. + +object: + +- status {Number} response status +- res {Object} response info + +### .processObjectSave(sourceObject, targetObject, process[, targetBucket]) + +Persistency indicates that images are asynchronously stored in the specified Bucket. -### Get Methods +parameters: + +- sourceObject {String} source object name +- targetObject {String} target object name +- process {String} process string +- [targetBucket] {String} target bucket -Will choose an alive client by schedule(`masterSlave` or `roundRobin`). +Success will return the response information. -- `client.get()` -- `client.head()` -- `client.getStream()` -- `client.list()` -- `client.signatureUrl()` -- `client.chooseAvailable()` - choose an available client by schedule. -- `client.getACL()` +object: -### Put Methods +- status {Number} response status +- res {Object} response info -Will put to all clients.
+```js +const sourceObject = 'a.png' +const targetObject = 'b.png' +const process = 'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00' -- `client.put()` -- `client.putStream()` -- `client.delete()` -- `client.deleteMulti()` -- `client.copy()` -- `client.putMeta()` -- `client.putACL()` -- `client.restore()` +await this.store.processObjectSave(sourceObject, targetObject, process); +``` ## Known Errors @@ -4417,54 +1580,68 @@ The following table lists the OSS error codes: [More code info](https://help.aliyun.com/knowledge_detail/32005.html) -name | code | status | message | message in Chinese ---- | --- | --- | --- | --- -AccessDeniedError | AccessDenied | 403 | Access Denied | 拒绝访问 -BucketAlreadyExistsError | BucketAlreadyExists | 409 | Bucket already exists | Bucket 已经存在 -BucketNotEmptyError | BucketNotEmpty | 409 | Bucket is not empty | Bucket 不为空 -RestoreAlreadyInProgressError | RestoreAlreadyInProgress | 409 | The restore operation is in progress. | restore 操作正在进行中 -OperationNotSupportedError | OperationNotSupported | 400 | The operation is not supported for this resource | 该资源暂不支持restore操作 -EntityTooLargeError | EntityTooLarge | 400 | Entity too large | 实体过大 -EntityTooSmallError | EntityTooSmall | 400 | Entity too small | 实体过小 -FileGroupTooLargeError | FileGroupTooLarge | 400 | File group too large | 文件组过大 -InvalidLinkNameError | InvalidLinkName | 400 | Link name can't be the same as the object name | Object Link 与指向的 Object 同名 -LinkPartNotExistError | LinkPartNotExist | 400 | Can't link to not exists object | Object Link 中指向的 Object 不存在 -ObjectLinkTooLargeError | ObjectLinkTooLarge | 400 | Too many links to this object | Object Link 中 Object 个数过多 -FieldItemTooLongError | FieldItemTooLong | 400 | Post form fields items too large | Post 请求中表单域过大 -FilePartInterityError | FilePartInterity | 400 | File part has changed | 文件 Part 已改变 -FilePartNotExistError | FilePartNotExist | 400 | File part not exists | 文件 Part 不存在 -FilePartStaleError | FilePartStale| 
400 | File part stale | 文件 Part 过时 -IncorrectNumberOfFilesInPOSTRequestError | IncorrectNumberOfFilesInPOSTRequest | 400 | Post request contains invalid number of files | Post 请求中文件个数非法 -InvalidArgumentError | InvalidArgument | 400 | Invalid format argument | 参数格式错误 -InvalidAccessKeyIdError | InvalidAccessKeyId | 400 | Access key id not exists | Access Key ID 不存在 -InvalidBucketNameError | InvalidBucketName | 400 | Invalid bucket name | 无效的 Bucket 名字 -InvalidDigestError | InvalidDigest | 400 | Invalid digest | 无效的摘要 -InvalidEncryptionAlgorithmError | InvalidEncryptionAlgorithm | 400 | Invalid encryption algorithm | 指定的熵编码加密算法错误 -InvalidObjectNameError | InvalidObjectName | 400 | Invalid object name | 无效的 Object 名字 -InvalidPartError | InvalidPart | 400 | Invalid part | 无效的 Part -InvalidPartOrderError | InvalidPartOrder | 400 | Invalid part order | 无效的 part 顺序 -InvalidPolicyDocumentError | InvalidPolicyDocument | 400 | Invalid policy document | 无效的 Policy 文档 -InvalidTargetBucketForLoggingError | InvalidTargetBucketForLogging | 400 | Invalid bucket on logging operation | Logging 操作中有无效的目标 bucket -InternalError | Internal | 500 | OSS server internal error | OSS 内部发生错误 -MalformedXMLError | MalformedXML | 400 | Malformed XML format | XML 格式非法 -MalformedPOSTRequestError | MalformedPOSTRequest | 400 | Invalid post body format | Post 请求的 body 格式非法 -MaxPOSTPreDataLengthExceededError | MaxPOSTPreDataLengthExceeded | 400 | Post extra data too large | Post 请求上传文件内容之外的 body 过大 -MethodNotAllowedError | MethodNotAllowed | 405 | Not allowed method | 不支持的方法 -MissingArgumentError | MissingArgument | 411 | Missing argument | 缺少参数 -MissingContentLengthError | MissingContentLength | 411 | Missing `Content-Length` header | 缺少内容长度 -NoSuchBucketError | NoSuchBucket | 404 | Bucket not exists | Bucket 不存在 -NoSuchKeyError | NoSuchKey | 404 | Object not exists | 文件不存在 -NoSuchUploadError | NoSuchUpload | 404 | Multipart upload id not exists | Multipart Upload ID 不存在 -NotImplementedError | 
NotImplemented | 501 | Not implemented | 无法处理的方法 -PreconditionFailedError | PreconditionFailed | 412 | Pre condition failed | 预处理错误 -RequestTimeTooSkewedError | RequestTimeTooSkewed | 403 | Request time exceeds 15 minutes to server time | 发起请求的时间和服务器时间超出 15 分钟 -RequestTimeoutError | RequestTimeout | 400 | Request timeout | 请求超时 -RequestIsNotMultiPartContentError | RequestIsNotMultiPartContent | 400 | Invalid post content-type | Post 请求 content-type 非法 -SignatureDoesNotMatchError | SignatureDoesNotMatch | 403 | Invalid signature | 签名错误 -TooManyBucketsError | TooManyBuckets | 400 | Too many buckets on this user | 用户的 Bucket 数目超过限制 -RequestError | RequestError | -1 | network error | 网络出现中断或异常 -ConnectionTimeoutError | ConnectionTimeoutError | -2 | request connect timeout | 请求连接超时 -SecurityTokenExpiredError | SecurityTokenExpired | 403 | sts Security Token Expired | sts Security Token 超时失效 - -[oss-sts]: https://help.aliyun.com/document_detail/oss/practice/ram_guide.html -[oss-multipart]: https://help.aliyun.com/document_detail/31992.html +code | status | message | message in Chinese +--- | --- | --- | --- +AccessDenied | 403 | Access Denied | 拒绝访问 +BucketAlreadyExists | 409 | Bucket already exists | Bucket 已经存在 +BucketNotEmpty | 409 | Bucket is not empty | Bucket 不为空 +RestoreAlreadyInProgress | 409 | The restore operation is in progress. 
| restore 操作正在进行中 +OperationNotSupported | 400 | The operation is not supported for this resource | 该资源暂不支持restore操作 +EntityTooLarge | 400 | Entity too large | 实体过大 +EntityTooSmall | 400 | Entity too small | 实体过小 +FileGroupTooLarge | 400 | File group too large | 文件组过大 +InvalidLinkName | 400 | Link name can't be the same as the object name | Object Link 与指向的 Object 同名 +LinkPartNotExist | 400 | Can't link to not exists object | Object Link 中指向的 Object 不存在 +ObjectLinkTooLarge | 400 | Too many links to this object | Object Link 中 Object 个数过多 +FieldItemTooLong | 400 | Post form fields items too large | Post 请求中表单域过大 +FilePartInterity | 400 | File part has changed | 文件 Part 已改变 +FilePartNotExist | 400 | File part not exists | 文件 Part 不存在 +FilePartStale| 400 | File part stale | 文件 Part 过时 +IncorrectNumberOfFilesInPOSTRequest | 400 | Post request contains invalid number of files | Post 请求中文件个数非法 +InvalidArgument | 400 | Invalid format argument | 参数格式错误 +InvalidAccessKeyId | 400 | Access key id not exists | Access Key ID 不存在 +InvalidBucketName | 400 | Invalid bucket name | 无效的 Bucket 名字 +InvalidDigest | 400 | Invalid digest | 无效的摘要 +InvalidEncryptionAlgorithm | 400 | Invalid encryption algorithm | 指定的熵编码加密算法错误 +InvalidObjectName | 400 | Invalid object name | 无效的 Object 名字 +InvalidPart | 400 | Invalid part | 无效的 Part +InvalidPartOrder | 400 | Invalid part order | 无效的 part 顺序 +InvalidPolicyDocument | 400 | Invalid policy document | 无效的 Policy 文档 +InvalidTargetBucketForLogging | 400 | Invalid bucket on logging operation | Logging 操作中有无效的目标 bucket +Internal | 500 | OSS server internal error | OSS 内部发生错误 +MalformedXML | 400 | Malformed XML format | XML 格式非法 +MalformedPOSTRequest | 400 | Invalid post body format | Post 请求的 body 格式非法 +MaxPOSTPreDataLengthExceeded | 400 | Post extra data too large | Post 请求上传文件内容之外的 body 过大 +MethodNotAllowed | 405 | Not allowed method | 不支持的方法 +MissingArgument | 411 | Missing argument | 缺少参数 +MissingContentLength | 411 | Missing `Content-Length` 
header | 缺少内容长度 +NoSuchBucket | 404 | Bucket not exists | Bucket 不存在 +NoSuchKey | 404 | Object not exists | 文件不存在 +NoSuchUpload | 404 | Multipart upload id not exists | Multipart Upload ID 不存在 +NotImplemented | 501 | Not implemented | 无法处理的方法 +PreconditionFailed | 412 | Pre condition failed | 预处理错误 +RequestTimeTooSkewed | 403 | Request time exceeds 15 minutes to server time | 发起请求的时间和服务器时间超出 15 分钟 +RequestTimeout | 400 | Request timeout | 请求超时 +RequestIsNotMultiPartContent | 400 | Invalid post content-type | Post 请求 content-type 非法 +SignatureDoesNotMatch | 403 | Invalid signature | 签名错误 +TooManyBuckets | 400 | Too many buckets on this user | 用户的 Bucket 数目超过限制 +RequestError | -1 | network error | 网络出现中断或异常 +ConnectionTimeoutError | -2 | request connect timeout | 请求连接超时 +SecurityTokenExpired | 403 | sts Security Token Expired | sts Security Token 超时失效 + + + +## Contributors + +|[
PeterRao](https://github.com/PeterRao)
|[
rockuw](https://github.com/rockuw)
|[
fengmk2](https://github.com/fengmk2)
|[
dead-horse](https://github.com/dead-horse)
|[
taotao7](https://github.com/taotao7)
|[
weiyie](https://github.com/weiyie)
| +| :---: | :---: | :---: | :---: | :---: | :---: | +|[
binghaiwang](https://github.com/binghaiwang)
|[
greenkeeperio-bot](https://github.com/greenkeeperio-bot)
|[
luozhang002](https://github.com/luozhang002)
|[
beajer](https://github.com/beajer)
|[
mars-coder](https://github.com/mars-coder)
|[
duan007a](https://github.com/duan007a)
| +|[
Ari1c](https://github.com/Ari1c)
|[
Pedestrian93](https://github.com/Pedestrian93)
|[
microJ](https://github.com/microJ)
|[
aloisklink](https://github.com/aloisklink)
|[
popomore](https://github.com/popomore)
|[
semantic-release-bot](https://github.com/semantic-release-bot)
| +|[
1019272778](https://github.com/1019272778)
|[
zensh](https://github.com/zensh)
|[
fool2fish](https://github.com/fool2fish)
|[
AviVahl](https://github.com/AviVahl)
|[
danielwpz](https://github.com/danielwpz)
|[
tianniu0106](https://github.com/tianniu0106)
| +|[
JacksonTian](https://github.com/JacksonTian)
|[
jackytck](https://github.com/jackytck)
|[
leoliew](https://github.com/leoliew)
|[
lfeng](https://github.com/lfeng)
|[
snyk-bot](https://github.com/snyk-bot)
|[
yupeng-yuxiaoyu](https://github.com/yupeng-yuxiaoyu)
| +|[
sartoshi-foot-dao](https://github.com/sartoshi-foot-dao)
|[
chay-xu](https://github.com/chay-xu)
|[
chunpu](https://github.com/chunpu)
|[
dependabot[bot]](https://github.com/apps/dependabot)
|[
duncup](https://github.com/duncup)
|[
qin](https://github.com/qin)
| +[
rdwh](https://github.com/rdwh)
|[
richex-cn](https://github.com/richex-cn)
|[
hengshanMWC](https://github.com/hengshanMWC)
+ +This project follows the git-contributor [spec](https://github.com/xudafeng/git-contributor), auto updated at `Sat Sep 16 2023 01:02:55 GMT+0800`. + + diff --git a/index.d.ts b/index.d.ts deleted file mode 100644 index 0fca45132..000000000 --- a/index.d.ts +++ /dev/null @@ -1,1082 +0,0 @@ -// Forked from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/types/ali-oss/index.d.ts - -import { Readable, Writable } from 'stream'; -import { - IObjectSimple, - GetObjectOptions, - ListObjectsQuery, - RequestOptions, - ListObjectResult, - PutObjectOptions, - PutObjectResult, - NormalSuccessResponse, - HeadObjectOptions, - HeadObjectResult, - GetObjectResult, - GetStreamOptions, - GetStreamResult, - CopyObjectOptions, - CopyAndPutMetaResult, - StorageType, - OwnerType, - UserMeta, - ObjectCallback, -} from 'oss-interface'; - -export * from 'oss-interface'; - -export interface ClientOptions { - /** access secret you create */ - accessKeyId: string; - /** access secret you create */ - accessKeySecret: string; - /** used by temporary authorization */ - stsToken?: string | undefined; - /** the default bucket you want to access If you don't have any bucket, please use putBucket() create one first. */ - bucket?: string | undefined; - /** oss region domain. It takes priority over region. */ - endpoint?: string | undefined; - /** the bucket data region location, please see Data Regions, default is oss-cn-hangzhou. */ - region?: string | undefined; - /** access OSS with aliyun internal network or not, default is false. If your servers are running on aliyun too, you can set true to save lot of money. */ - internal?: boolean | undefined; - /** instruct OSS client to use HTTPS (secure: true) or HTTP (secure: false) protocol. 
*/ - secure?: boolean | undefined; - /** instance level timeout for all operations, default is 60s */ - timeout?: string | number | undefined; - /** use custom domain name */ - cname?: boolean | undefined; - /** use time (ms) of refresh STSToken interval it should be less than sts info expire interval, default is 300000ms(5min) when sts info expires. */ - refreshSTSTokenInterval?: number; - /** used by auto set stsToken、accessKeyId、accessKeySecret when sts info expires. return value must be object contains stsToken、accessKeyId、accessKeySecret */ - refreshSTSToken?: () => Promise<{ accessKeyId: string, accessKeySecret: string, stsToken: string }>; -} - -/** - * Generate STS Authorization - */ -export class STS { - constructor(options: STSOptions); - - assumeRole( - roleArn: string, - /** - * RAM Policy config object or valid JSON string - */ - policy?: object | string, // TODO: RAM policy type - expirationSeconds?: number, - session?: string, - options?: { - timeout: number; - /** - * ctx param in urllib's request param - */ - ctx: any; - }, - ): Promise<{ credentials: Credentials }>; -} - -export interface Credentials { - /** - * STS access key id. - */ - AccessKeyId: string; - - /** - * STS access key secret. - */ - AccessKeySecret: string; - - /** - * STS token. - */ - SecurityToken: string; - - /** - * STS expiration UTC time in ISO format. - */ - Expiration: string; -} - -export interface STSOptions { - /** - * Access key id. - */ - accessKeyId: string; - - /** - * Access key secret. 
- */ - accessKeySecret: string; -} - -export interface Bucket { - name: string; - region: string; - creationDate: string; - StorageClass: StorageType; -} - -export type ACLType = 'public-read-write' | 'public-read' | 'private'; - -export type HTTPMethods = 'GET' | 'POST' | 'DELETE' | 'PUT'; - -export type RedundancyType = 'LRS' | 'ZRS'; - -export type RuleStatusType = 'Enabled' | 'Disabled'; - -export interface LifecycleRule { - /** rule id, if not set, OSS will auto create it with random string. */ - id?: string | undefined; - /** store prefix */ - prefix: string; - /** rule status, allow values: Enabled or Disabled */ - status: RuleStatusType; - /** expire after the days */ - days?: number | string | undefined; - /** expire date, e.g.: 2022-10-11T00:00:00.000Z date and days only set one. */ - date: string; -} - -export interface CORSRule { - /** configure for Access-Control-Allow-Origin header */ - allowedOrigin: string | string[]; - /** configure for Access-Control-Allow-Methods header */ - allowedMethod: string | string[]; - /** configure for Access-Control-Allow-Headers header */ - allowedHeader?: string | string[] | undefined; - /** configure for Access-Control-Expose-Headers header */ - exposeHeader?: string | string[] | undefined; - /** configure for Access-Control-Max-Age header */ - maxAgeSeconds?: string | string[] | undefined; -} - -export interface BucketPolicy { - Version: string; - Statement: Array<{ - Action: string[]; - Effect: 'Allow' | 'Deny'; - Principal: string[]; - Resource: string[]; - }>; -} - -export interface Checkpoint { - /** The file object selected by the user, if the browser is restarted, it needs the user to manually trigger the settings */ - file: any; - /** object key */ - name: string; - fileSize: number; - partSize: number; - uploadId: string; - doneParts: Array<{ number: number; etag: string }>; -} - -export interface ObjectPart { - PartNumber: number; - /** {Date} Time when a part is uploaded. 
*/ - LastModified: any; - ETag: string; - size: number; -} - -export interface Upload { - name: string; - uploadId: string; - initiated: any; -} - -export interface Channel { - Name: string; - Description: string; - Status: string; - LastModified: string; - PublishUrls: string[]; - PlayUrls: string[]; -} - -export interface ChannelHistory { - StartTime: string; - EndTime: string; - /** the remote addr */ - RemoteAddr: string; -} - -// parameters type -export interface ListBucketsQueryType { - /** search buckets using prefix key */ - prefix?: string | undefined; - /** search start from marker, including marker key */ - marker?: string | undefined; - /** max buckets, default is 100, limit to 1000 */ - 'max-keys'?: string | number | undefined; -} - -export interface PutBucketOptions { - acl: ACLType; - dataRedundancyType: RedundancyType; - timeout: number; - storageClass: StorageType; -} - -export interface PutBucketWebsiteConfig { - /** default page, e.g.: index.html */ - index: string; - /** error page, e.g.: 'error.html' */ - error?: string | undefined; -} - -export interface ListV2ObjectsQuery { - /** search object using prefix key */ - prefix?: string; - /** search start from token, including token key */ - 'continuation-token'?: string; - /** only search current dir, not including subdir */ - delimiter?: string | number; - /** max objects, default is 100, limit to 1000 */ - 'max-keys'?: string; - /** - * The name of the object from which the list operation begins. - * If this parameter is specified, objects whose names are alphabetically greater than the start-after parameter value are returned. - */ - 'start-after'?: string; - /** Specifies whether to include the information about object owners in the response. */ - 'fetch-owner'?: boolean; - /** Specifies that the object names in the response are URL-encoded. 
*/ - 'encoding-type'?: 'url' | ''; -} - -export interface PutStreamOptions { - /** the stream length, chunked encoding will be used if absent */ - contentLength?: number | undefined; - /** the operation timeout */ - timeout: number; - /** custom mime, will send with Content-Type entity header */ - mime: string; - meta: UserMeta; - callback: ObjectCallback; - headers?: object | undefined; -} - -export interface AppendObjectOptions { - /** specify the position which is the content length of the latest object */ - position?: string | undefined; - /** the operation timeout */ - timeout?: number | undefined; - /** custom mime, will send with Content-Type entity header */ - mime?: string | undefined; - meta?: UserMeta | undefined; - headers?: object | undefined; -} - -export interface AppendObjectResult { - name: string; - /** the url of oss */ - url: string; - res: NormalSuccessResponse; - /** the next position */ - nextAppendPosition: string; -} - -export interface DeleteMultiOptions { - /** quite mode or verbose mode, default is false */ - quiet?: boolean | undefined; - timeout?: number | undefined; -} - -export interface DeleteMultiResult { - /** deleted object names list */ - deleted?: string[] | undefined; - res: NormalSuccessResponse; -} - -export interface ResponseHeaderType { - 'content-type'?: string | undefined; - 'content-disposition'?: string | undefined; - 'cache-control'?: string | undefined; -} - -export interface SignatureUrlOptions { - /** after expires seconds, the url will become invalid, default is 1800 */ - expires?: number | undefined; - /** the HTTP method, default is 'GET' */ - method?: HTTPMethods | undefined; - /** set the request content type */ - 'Content-Type'?: string | undefined; - /** image process params, will send with x-oss-process e.g.: {process: 'image/resize,w_200'} */ - process?: string | undefined; - /** traffic limit, range: 819200~838860800 */ - trafficLimit?: number | undefined; - /** additional signature parameters in url */ - 
subResource?: object | undefined; - /** set the response headers for download */ - response?: ResponseHeaderType | undefined; - /** set the callback for the operation */ - callback?: ObjectCallback | undefined; -} - -export interface GetACLResult { - acl: ACLType; - res: NormalSuccessResponse; -} - -export interface InitMultipartUploadOptions { - timeout?: number | undefined; - /** Mime file type */ - mime?: string | undefined; - meta?: UserMeta | undefined; - headers?: object | undefined; -} - -export interface InitMultipartUploadResult { - res: { status: number; headers: object; size: number; rt: number }; - /** bucket name */ - bucket: string; - /** object name store on OSS */ - name: string; - /** upload id, use for uploadPart, completeMultipart */ - uploadId: string; -} - -export interface UploadPartResult { - name: string; - etag: string; - res: NormalSuccessResponse; -} - -export interface CompleteMultipartUploadOptions { - timeout?: number | undefined; - callback?: ObjectCallback | undefined; - headers?: object | undefined; -} - -export interface CompleteMultipartUploadResult { - bucket: string; - name: string; - etag: string; - data: object; - res: NormalSuccessResponse; -} - -export interface MultipartUploadOptions { - /** the number of parts to be uploaded in parallel */ - parallel?: number | undefined; - /** the suggested size for each part */ - partSize?: number | undefined; - /** the progress callback called after each successful upload of one part */ - progress?: ((...args: any[]) => any) | undefined; - /** the checkpoint to resume upload, if this is provided, it will continue the upload from where interrupted, otherwise a new multipart upload will be created. 
*/ - checkpoint?: Checkpoint | undefined; - meta?: UserMeta | undefined; - mime?: string | undefined; - callback?: ObjectCallback | undefined; - headers?: object | undefined; - timeout?: number | undefined; - /** {Object} only uploadPartCopy api used, detail */ - copyheaders?: object | undefined; -} - -export interface MultipartUploadResult { - bucket: string; - name: string; - etag: string; - data: object; - res: NormalSuccessResponse; -} - -export interface MultipartUploadCopyResult { - bucket: string; - name: string; - etag: string; - res: NormalSuccessResponse; -} - -export interface MultipartUploadCopySourceData { - /** the source object name */ - sourceKey: string; - /** sourceData. the source bucket name */ - sourceBucketName: string; - /** data copy start byte offset, e.g: 0 */ - startOffset: number; - /** data copy end byte offset, e.g: 102400 */ - endOffset: number; -} - -export interface ListPartsQuery { - /** The maximum part number in the response of the OSS. default value: 1000. */ - 'max-parts': number; - /** Starting position of a specific list. A part is listed only when the part number is greater than the value of this parameter. */ - 'part-number-marker': number; - /** Specify the encoding of the returned content and the encoding type. 
Optional value: url */ - 'encoding-type': string; -} - -export interface ListPartsResult { - uploadId: string; - bucket: string; - name: string; - PartNumberMarker: number; - nextPartNumberMarker: number; - maxParts: number; - isTruncated: boolean; - parts: ObjectPart[]; - res: NormalSuccessResponse; -} - -export interface ListUploadsQuery { - prefix?: string | undefined; - 'max-uploads'?: number | undefined; - 'key-marker'?: string | undefined; - 'upload-id-marker'?: string | undefined; -} - -export interface ListUploadsResult { - res: NormalSuccessResponse; - bucket: string; - nextKeyMarker: any; - nextUploadIdMarker: any; - isTruncated: boolean; - uploads: Upload[]; -} - -export interface PutChannelConf { - Description?: string | undefined; - Status?: string | undefined; - Target?: { - Type: string; - FragDuration: number; - FragCount: number; - PlaylistName: string; - } | undefined; -} - -export interface PutChannelResult { - publishUrls: string[]; - playUrls: string[]; - res: NormalSuccessResponse; -} - -export interface GetChannelResult { - Status: string; - ConnectedTime?: string | undefined; - RemoteAddr?: string | undefined; - Video?: object | undefined; - Audio?: object | undefined; - res: NormalSuccessResponse; -} - -export interface ListChannelsQuery { - /** the channel id prefix (returns channels with this prefix) */ - prefix: string; - /** the channel id marker (returns channels after this id) */ - marker: string; - /** max number of channels to return */ - 'max-keys ': number; -} - -export interface ListChannelsResult { - channels: Channel[]; - nextMarker: string | null; - isTruncated: boolean; - res: NormalSuccessResponse; -} - -export interface ChannelHistoryResult { - records: ChannelHistory; - res: NormalSuccessResponse; -} - -export interface GetRtmpUrlOptions { - /** the expire time in seconds of the url */ - expires?: number | undefined; - /** the additional parameters for url, e.g.: {playlistName: 'play.m3u8'} */ - params?: object | 
undefined; - /** the operation timeout */ - timeout?: number | undefined; -} - -export interface GetBucketPolicyResult { - policy: BucketPolicy | null; - status: number; - res: NormalSuccessResponse; -} - -export interface PostObjectParams { - policy: string; - OSSAccessKeyId: string; - Signature: string; -} - -// cluster -export interface ClusterType { - host: string; - accessKeyId: string; - accessKeySecret: string; -} - -export interface ClusterOptions { - clusters: ClusterType[]; - schedule?: string | undefined; -} - -export class ClusterClient { - constructor(options: ClusterOptions); - - list(query: ListObjectsQuery | null, options: RequestOptions): Promise; - - /** - * @since 6.12.0 - */ - listV2(query: ListV2ObjectsQuery | null, options: RequestOptions): Promise; - - put(name: string, file: any, options?: PutObjectOptions): Promise; - - putStream( - name: string, - stream: any, - options?: PutStreamOptions, - ): Promise<{ name: string; res: NormalSuccessResponse }>; - - head(name: string, options?: HeadObjectOptions): Promise; - - get(name: string, file?: any, options?: GetObjectOptions): Promise; - - getStream(name?: string, options?: GetStreamOptions): Promise; - - delete(name: string, options?: RequestOptions): Promise; - - copy(name: string, sourceName: string, options?: CopyObjectOptions): Promise; - - putMeta(name: string, meta: UserMeta, options: RequestOptions): Promise; - - deleteMulti(names: string[], options?: DeleteMultiOptions): Promise; - - signatureUrl(name: string, options?: SignatureUrlOptions): string; - - asyncSignatureUrl(name: string, options?: SignatureUrlOptions): Promise; - - putACL(name: string, acl: ACLType, options?: RequestOptions): Promise; - - restore(name: string, options?: RequestOptions): Promise; -} - -// image -export interface ImageClientOptions { - /** your image service domain that binding to a OSS bucket */ - imageHost: string; - /** access key you create on aliyun console website */ - accessKeyId: string; - /** access 
secret you create */ - accessKeySecret: string; - /** the default bucket you want to access If you don't have any bucket, please use putBucket() create one first. */ - bucket: string; - /** the bucket data region location, please see Data Regions, default is oss-cn-hangzhou */ - region?: string | undefined; - /** access OSS with aliyun internal network or not, default is false If your servers are running on aliyun too, you can set true to save lot of money. */ - internal?: boolean | undefined; - /** instance level timeout for all operations, default is 60s */ - timeout?: string | number | undefined; -} - -export interface ImageGetOptions { - timeout?: number | undefined; - headers?: object | undefined; -} - -export interface StyleData { - /** style name */ - Name: string; - /** style content */ - Content: string; - /** style create time */ - CreateTime: string; - /** style last modify time */ - LastModifyTime: string; -} - -export class ImageClient { - constructor(options: ImageClientOptions); - - /** - * Get an image from the image channel. - */ - get(name: string, file?: any, options?: ImageGetOptions): Promise<{ content: any; res: NormalSuccessResponse }>; - - /** - * Get an image read stream. - */ - getStream(name: string, options?: ImageGetOptions): Promise<{ stream: any; res: NormalSuccessResponse }>; - - /** - * Get a image exif info by image object name from the image channel. - */ - getExif(name: string, options?: RequestOptions): Promise<{ data: object; res: NormalSuccessResponse }>; - - /** - * Get a image info and exif info by image object name from the image channel. - */ - getInfo(name: string, options?: RequestOptions): Promise<{ data: object; res: NormalSuccessResponse }>; - - /** - * todo - */ - putStyle( - name: string, - style: string, - options?: RequestOptions, - ): Promise<{ data: object; res: NormalSuccessResponse }>; - - /** - * Get a style by name from the image channel. 
- */ - getStyle(name: string, options?: RequestOptions): Promise<{ data: StyleData; res: NormalSuccessResponse }>; - - /** - * Get all styles from the image channel. - */ - listStyle(options?: RequestOptions): Promise; - - /** - * todo - */ - deleteStyle(styleName: string, options?: RequestOptions): Promise; - - /** - * Create a signature url for directly download. - */ - signatureUrl(name: string, options?: { expires?: string | undefined; timeout?: string | undefined }): string; - - /** - * Basically the same as signatureUrl, if refreshSTSToken is configured asyncSignatureUrl will refresh stsToken - */ - asyncSignatureUrl(name: string, options?: SignatureUrlOptions): Promise; -} - -// base Client -export class Client implements IObjectSimple { - constructor(options: ClientOptions); - - /******************************************* the bucket operations *************************************************/ - - // base operators - /** - * List buckets in this account. - */ - listBuckets(query: ListBucketsQueryType | null, options?: RequestOptions): Promise; - - /** - * Create a new bucket. - */ - putBucket( - name: string, - options?: PutBucketOptions, - ): Promise<{ bucket: string; res: NormalSuccessResponse }>; - - /** - * Use the bucket. - */ - useBucket(name: string): void; - - /** - * Delete an empty bucket. - */ - deleteBucket(name: string, options?: RequestOptions): Promise; - - /** - * Get bucket information,include CreationDate、ExtranetEndpoint、IntranetEndpoint、Location、Name、StorageClass、 Owner、AccessControlList - */ - getBucketInfo(name: string): Promise; - - /** - * Get bucket location - */ - getBucketLocation(name: string): Promise; - - // ACL operations - /** - * Update the bucket ACL. - */ - putBucketACL(name: string, acl: ACLType, options?: RequestOptions): Promise; - - /** - * Get the bucket ACL. 
- * acl - acl settings string - */ - getBucketACL(name: string, options?: RequestOptions): Promise<{ acl: string; res: NormalSuccessResponse }>; - - // logging operations - /** - * Update the bucket logging settings. Log file will create every one hour and name format: -YYYY-mm-DD-HH-MM-SS-UniqueString. - */ - putBucketLogging(name: string, prefix?: string, options?: RequestOptions): Promise; - - /** - * Get the bucket logging settings. - */ - getBucketLogging( - name: string, - options?: RequestOptions, - ): Promise<{ enable: boolean; prefix: string | null; res: NormalSuccessResponse }>; - - /** - * Delete the bucket logging settings. - */ - deleteBucketLogging(name: string, options?: RequestOptions): Promise; - - // Website operations - /** - * Set the bucket as a static website. - */ - putBucketWebsite(name: string, config: PutBucketWebsiteConfig): Promise; - - /** - * Get the bucket website config. - */ - getBucketWebsite( - name: string, - options?: RequestOptions, - ): Promise<{ index: string; error: string; res: NormalSuccessResponse }>; - - /** - * Delete the bucket website config. - */ - deleteBucketWebsite(name: string, options?: RequestOptions): Promise; - - // referer operations - /** - * Set the bucket request Referer white list. - */ - putBucketReferer( - name: string, - allowEmpty: boolean, - referers: string[], - options?: RequestOptions, - ): Promise; - - /** - * Get the bucket request Referer white list. - */ - getBucketReferer( - name: string, - options?: RequestOptions, - ): Promise<{ allowEmpty: boolean; referers: string[]; res: NormalSuccessResponse }>; - - /** - * Delete the bucket request Referer white list. - */ - deleteBucketReferer(name: string, options?: RequestOptions): Promise; - - // lifecycle operations - /** - * Set the bucket object lifecycle. - */ - putBucketLifecycle( - name: string, - rules: LifecycleRule[], - options?: RequestOptions, - ): Promise; - - /** - * Get the bucket object lifecycle. 
- */ - getBucketLifecycle( - name: string, - options?: RequestOptions, - ): Promise<{ rules: LifecycleRule[]; res: NormalSuccessResponse }>; - - /** - * Delete the bucket object lifecycle. - */ - deleteBucketLifecycle(name: string, options?: RequestOptions): Promise; - - // CORS operations - /** - * Set CORS rules of the bucket object - */ - putBucketCORS( - name: string, - rules: CORSRule[], - options?: RequestOptions, - ): Promise; - - /** - * Get CORS rules of the bucket object. - */ - getBucketCORS(name: string): Promise<{ rules: CORSRule[]; res: NormalSuccessResponse }>; - - /** - * Delete CORS rules of the bucket object. - */ - deleteBucketCORS(name: string): Promise; - - // policy operations - /** - * Adds or modify policy for a bucket. - */ - putBucketPolicy( - name: string, - policy: BucketPolicy, - options?: RequestOptions - ): Promise<{ - status: number, - res: NormalSuccessResponse, - }>; - - /** - * Obtains the policy for a bucket. - */ - getBucketPolicy(name: string, options?: RequestOptions): Promise; - - /** - * Deletes the policy added for a bucket. - */ - deleteBucketPolicy( - name: string, - options?: RequestOptions - ): Promise<{ - status: number, - res: NormalSuccessResponse, - }>; - - /********************************************************** Object operations ********************************************/ - /** - * List objects in the bucket. - */ - list(query: ListObjectsQuery | null, options?: RequestOptions): Promise; - - /** - * Add an object to the bucket. - */ - put(name: string, file: string | Buffer | Uint8Array | Readable, options?: PutObjectOptions): Promise; - - /** - * Add a stream object to the bucket. - */ - putStream( - name: string, - stream: any, - options?: PutStreamOptions, - ): Promise<{ name: string; res: NormalSuccessResponse }>; - - /** - * Append an object to the bucket, it's almost same as put, but it can add content to existing object rather than override it. 
- */ - append(name: string, file: any, options?: AppendObjectOptions): Promise; - - /** - * Get the Object url. If provide baseUrl, will use baseUrl instead the default endpoint. - */ - getObjectUrl(name: string, baseUrl?: string): string; - - /** - * Get the Object url. If provide baseUrl, will use baseUrl instead the default bucket and endpoint. Suggest use generateObjectUrl instead of getObjectUrl. - */ - generateObjectUrl(name: string, baseUrl?: string): string; - - /** - * Head an object and get the meta info. - */ - head(name: string, options?: HeadObjectOptions): Promise; - - /** - * Get an object from the bucket. - */ - get(name: string, options?: GetObjectOptions): Promise; - get(name: string, file: string | Writable, options?: GetObjectOptions): Promise; - - /** - * Get an object read stream. - */ - getStream(name?: string, options?: GetStreamOptions): Promise; - - /** - * Delete an object from the bucket. - */ - delete(name: string, options?: RequestOptions): Promise; - - /** - * Copy an object from sourceName to name. - */ - copy(name: string, sourceName: string, options?: CopyObjectOptions): Promise; - copy(name: string, sourceName: string, sourceBucket: string, options?: CopyObjectOptions): Promise; - - /** - * Set an exists object meta. - */ - putMeta(name: string, meta: UserMeta, options: RequestOptions): Promise; - - /** - * Delete multi objects in one request. - */ - deleteMulti(names: string[], options?: DeleteMultiOptions): Promise; - - /** - * Create a signature url for download or upload object. When you put object with signatureUrl ,you need to pass Content-Type.Please look at the example. - */ - signatureUrl(name: string, options?: SignatureUrlOptions): string; - - /** - * Basically the same as signatureUrl, if refreshSTSToken is configured asyncSignatureUrl will refresh stsToken - */ - asyncSignatureUrl(name: string, options?: SignatureUrlOptions): Promise; - - /** - * Set object's ACL. 
- */ - putACL(name: string, acl: ACLType, options?: RequestOptions): Promise; - - /** - * Get object's ACL. - */ - getACL(name: string, options?: RequestOptions): Promise; - - /** - * Restore Object. - */ - restore(name: string, options?: RequestOptions): Promise; - - /** - * multi upload - */ - initMultipartUpload(name: string, options?: InitMultipartUploadOptions): Promise; - - /** - * After initiating a Multipart Upload event, you can upload data in parts based on the specified object name and Upload ID. - */ - uploadPart( - name: string, - uploadId: string, - partNo: number, - file: any, - start: number, - end: number, - options?: RequestOptions, - ): Promise; - - /** - * Using Upload Part Copy, you can copy data from an existing object and upload a part of the data. - * When copying a file larger than 1 GB, you must use the Upload Part Copy method. If you want to copy a file smaller than 1 GB, see Copy Object. - */ - uploadPartCopy( - name: string, - uploadId: string, - partNo: number, - range: string, - sourceData: { sourceKey: string; sourceBucketName: string }, - options: { timeout?: number | undefined; headers?: object | undefined }, - ): Promise; - - /** - * After uploading all data parts, you must call the Complete Multipart Upload API to complete Multipart Upload for the entire file. - */ - completeMultipartUpload( - name: string, - uploadId: string, - parts: Array<{ number: number; etag: string }>, - options?: CompleteMultipartUploadOptions, - ): Promise; - - /** - * Upload file with OSS multipart. - */ - multipartUpload(name: string, file: any, options: MultipartUploadOptions): Promise; - - /** - * Copy file with OSS multipart. - * this function contains head, initMultipartUpload, uploadPartCopy, completeMultipartUpload. - * When copying a file larger than 1 GB, you should use the Upload Part Copy method. If you want to copy a file smaller than 1 GB, see Copy Object. 
- */ - multipartUploadCopy( - name: string, - sourceData: MultipartUploadCopySourceData, - options?: MultipartUploadOptions, - ): Promise; - - /** - * The ListParts command can be used to list all successfully uploaded parts mapped to a specific upload ID, i.e.: those not completed and not aborted. - */ - listParts( - name: string, - uploadId: string, - query?: ListPartsQuery, - options?: RequestOptions, - ): Promise; - - /** - * List on-going multipart uploads, i.e.: those not completed and not aborted. - */ - listUploads(query: ListUploadsQuery, options?: RequestOptions): Promise; - - /** - * Abort a multipart upload for object. - */ - abortMultipartUpload( - name: string, - uploadId: string, - options?: RequestOptions, - ): Promise; - - /** - * get postObject params. - */ - calculatePostSignature( - /** - * policy config object or JSON string - */ - policy: object | string - ): PostObjectParams; - - /************************************************ RTMP Operations *************************************************************/ - /** - * Create a live channel. - */ - putChannel(id: string, conf: PutChannelConf, options?: RequestOptions): Promise; - - /** - * Get live channel info. - */ - getChannel( - id: string, - options?: RequestOptions, - ): Promise<{ data: PutChannelConf; res: NormalSuccessResponse }>; - - /** - * Delete a live channel. - */ - deleteChannel(id: string, options?: RequestOptions): Promise; - - /** - * Change the live channel status. - */ - putChannelStatus(id: string, status?: string, options?: RequestOptions): Promise; - - /** - * Get the live channel status. - */ - getChannelStatus(id: string, options?: RequestOptions): Promise; - - /** - * List channels. - */ - listChannels(query: ListChannelsQuery, options?: RequestOptions): Promise; - - /** - * Get the live channel history. - */ - getChannelHistory(id: string, options?: RequestOptions): Promise; - - /** - * Create a VOD playlist for the channel. 
- */ - createVod( - id: string, - name: string, - time: { startTime: number; endTime: number }, - options?: RequestOptions, - ): Promise; - - /** - * Get signatured rtmp url for publishing. - */ - getRtmpUrl(channelId?: string, options?: GetRtmpUrlOptions): string; -} diff --git a/index.test-d.ts b/index.test-d.ts deleted file mode 100644 index ea9883aec..000000000 --- a/index.test-d.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { expectType } from 'tsd'; -import { Writable, Readable } from 'stream'; -import { - GetObjectOptions, - IObjectSimple, - SignatureUrlOptions, - ListObjectsQuery, - RequestOptions, - ListObjectResult, - PutObjectOptions, - PutObjectResult, - NormalSuccessResponse, - HeadObjectOptions, - HeadObjectResult, - GetObjectResult, - GetStreamOptions, - GetStreamResult, - CopyObjectOptions, - CopyAndPutMetaResult, - Client, - ImageClient, - ClusterClient, -} from '.'; - -const getObjectOptions = {} as GetObjectOptions; -expectType(getObjectOptions.process); - -class SimpleClient implements IObjectSimple { - async list(query: ListObjectsQuery | null, options: RequestOptions): Promise { - console.log(query, options); - return {} as any; - } - async put(name: string, file: string | Buffer | Uint8Array | Readable, options?: PutObjectOptions): Promise { - console.log(name, file, options); - return {} as any; - } - async head(name: string, options?: HeadObjectOptions): Promise { - console.log(name, options); - return {} as any; - } - - async get(name: string, options?: GetObjectOptions): Promise; - async get(name: string, file: string | Writable, options?: GetObjectOptions): Promise; - async get(name: string, file?: string | Writable | GetObjectOptions, options?: GetObjectOptions): Promise { - console.log(name, file, options); - return {} as any; - } - async getStream(name?: string, options?: GetStreamOptions): Promise { - console.log(name, options); - return {} as any; - } - async delete(name: string, options?: RequestOptions): Promise { - console.log(name, 
options); - return {} as any; - } - - async copy(name: string, sourceName: string, options?: CopyObjectOptions): Promise; - async copy(name: string, sourceName: string, sourceBucket: string, options?: CopyObjectOptions): Promise; - async copy(name: string, sourceName: string, sourceBucket?: string | CopyObjectOptions, options?: CopyObjectOptions): Promise { - console.log(name, sourceName, sourceBucket, options); - return {} as any; - } - - async asyncSignatureUrl(name: string, options?: SignatureUrlOptions) { - console.log(name, options); - return ''; - } -} - -const simpleClient = new SimpleClient(); -expectType>(simpleClient.get('foo')); -expectType>(simpleClient.get('foo', { timeout: 10 })); -expectType>(simpleClient.get('foo', 'file.path')); - -const ossClient = {} as Client; -expectType>(ossClient.get('foo')); -expectType>(ossClient.list({ 'max-keys': 100 })); - -const clusterClient = {} as ClusterClient; -expectType>(clusterClient.get('foo')); - -const imageClient = {} as ImageClient; -expectType>(imageClient.get('foo')); - -const bytes = {} as Uint8Array; -expectType>(simpleClient.put('foo', bytes)); diff --git a/lib/bucket.js b/lib/bucket.js deleted file mode 100644 index 05c2affb3..000000000 --- a/lib/bucket.js +++ /dev/null @@ -1,342 +0,0 @@ -const assert = require('assert'); -const { checkBucketName } = require('../lib/common/utils/checkBucketName'); -const { formatTag } = require('../lib/common/utils/formatTag'); - -const proto = exports; - - -function toArray(obj) { - if (!obj) return []; - if (Array.isArray(obj)) return obj; - return [ obj ]; -} - -/** - * Bucket opertaions - */ - -proto.listBuckets = async function listBuckets(query = {}, options = {}) { - // prefix, marker, max-keys - - const { subres = {} } = query; - const rest = {}; - for (const key in query) { - if (key !== 'subres') { - rest[key] = query[key]; - } - } - const params = this._bucketRequestParams( - 'GET', - '', - Object.assign(subres, options.subres), - options - ); - - 
params.query = rest; - - const result = await this.request(params); - - if (result.status === 200) { - const data = await this.parseXML(result.data); - let buckets = data.Buckets || null; - if (buckets) { - if (buckets.Bucket) { - buckets = buckets.Bucket; - } - if (!Array.isArray(buckets)) { - buckets = [ buckets ]; - } - buckets = buckets.map(item => ({ - name: item.Name, - region: item.Location, - creationDate: item.CreationDate, - storageClass: item.StorageClass, - StorageClass: item.StorageClass, - tag: formatTag(item), - })); - } - return { - buckets, - owner: { - id: data.Owner.ID, - displayName: data.Owner.DisplayName, - }, - isTruncated: data.IsTruncated === 'true', - nextMarker: data.NextMarker || null, - res: result.res, - }; - } - - throw await this.requestError(result); -}; - -proto.useBucket = function useBucket(name) { - checkBucketName(name); - return this.setBucket(name); -}; - -proto.setBucket = function useBucket(name) { - checkBucketName(name); - this.options.bucket = name; - return this; -}; - -proto.getBucket = function getBucket() { - return this.options.bucket; -}; - -proto.getBucketLocation = async function getBucketLocation(name, options) { - checkBucketName(name); - name = name || this.getBucket(); - const params = this._bucketRequestParams('GET', name, 'location', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - return { - location: result.data, - res: result.res, - }; -}; - -proto.getBucketInfo = async function getBucketInfo(name, options) { - checkBucketName(name); - name = name || this.getBucket(); - const params = this._bucketRequestParams('GET', name, 'bucketInfo', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - return { - bucket: result.data.Bucket, - res: result.res, - }; -}; - -proto.deleteBucket = async function deleteBucket(name, options) { - checkBucketName(name); - const params = 
this._bucketRequestParams('DELETE', name, '', options); - const result = await this.request(params); - if (result.status === 200 || result.status === 204) { - return { - res: result.res, - }; - } - throw await this.requestError(result); -}; - -// acl - -proto.putBucketACL = async function putBucketACL(name, acl, options) { - checkBucketName(name); - const params = this._bucketRequestParams('PUT', name, 'acl', options); - params.headers = { - 'x-oss-acl': acl, - }; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - bucket: (result.headers.location && result.headers.location.substring(1)) || null, - res: result.res, - }; -}; - -proto.getBucketACL = async function getBucketACL(name, options) { - checkBucketName(name); - const params = this._bucketRequestParams('GET', name, 'acl', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - return { - acl: result.data.AccessControlList.Grant, - owner: { - id: result.data.Owner.ID, - displayName: result.data.Owner.DisplayName, - }, - res: result.res, - }; -}; - -// logging - -proto.putBucketLogging = async function putBucketLogging(name, prefix, options) { - checkBucketName(name); - const params = this._bucketRequestParams('PUT', name, 'logging', options); - let xml = `${'\n\n' + - '\n'}${name}\n`; - if (prefix) { - xml += `${prefix}\n`; - } - xml += '\n'; - params.content = xml; - params.mime = 'xml'; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; - -proto.getBucketLogging = async function getBucketLogging(name, options) { - checkBucketName(name); - const params = this._bucketRequestParams('GET', name, 'logging', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - const enable = result.data.LoggingEnabled; - return { - enable: !!enable, - prefix: (enable && 
enable.TargetPrefix) || null, - res: result.res, - }; -}; - -proto.deleteBucketLogging = async function deleteBucketLogging(name, options) { - checkBucketName(name); - const params = this._bucketRequestParams('DELETE', name, 'logging', options); - params.successStatuses = [ 204, 200 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; - -proto.putBucketCORS = async function putBucketCORS(name, rules, options) { - checkBucketName(name); - rules = rules || []; - assert(rules.length, 'rules is required'); - rules.forEach(rule => { - assert(rule.allowedOrigin, 'allowedOrigin is required'); - assert(rule.allowedMethod, 'allowedMethod is required'); - }); - - const params = this._bucketRequestParams('PUT', name, 'cors', options); - let xml = '\n'; - const parseOrigin = val => { - xml += `${val}`; - }; - const parseMethod = val => { - xml += `${val}`; - }; - const parseHeader = val => { - xml += `${val}`; - }; - const parseExposeHeader = val => { - xml += `${val}`; - }; - for (let i = 0, l = rules.length; i < l; i++) { - const rule = rules[i]; - xml += ''; - - toArray(rule.allowedOrigin).forEach(parseOrigin); - toArray(rule.allowedMethod).forEach(parseMethod); - toArray(rule.allowedHeader).forEach(parseHeader); - toArray(rule.exposeHeader).forEach(parseExposeHeader); - if (rule.maxAgeSeconds) { - xml += `${rule.maxAgeSeconds}`; - } - xml += ''; - } - xml += ''; - params.content = xml; - params.mime = 'xml'; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; - -proto.getBucketCORS = async function getBucketCORS(name, options) { - checkBucketName(name); - const params = this._bucketRequestParams('GET', name, 'cors', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - const rules = []; - if (result.data && result.data.CORSRule) { - let { CORSRule } = result.data; - if (!Array.isArray(CORSRule)) CORSRule = 
[ CORSRule ]; - CORSRule.forEach(rule => { - const r = {}; - Object.keys(rule).forEach(key => { - r[key.slice(0, 1).toLowerCase() + key.slice(1, key.length)] = rule[key]; - }); - rules.push(r); - }); - } - return { - rules, - res: result.res, - }; -}; - -proto.deleteBucketCORS = async function deleteBucketCORS(name, options) { - checkBucketName(name); - const params = this._bucketRequestParams('DELETE', name, 'cors', options); - params.successStatuses = [ 204 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; - -// referer - -proto.putBucketReferer = async function putBucketReferer(name, allowEmpty, referers, options) { - checkBucketName(name); - const params = this._bucketRequestParams('PUT', name, 'referer', options); - let xml = '\n\n'; - xml += ` ${allowEmpty ? 'true' : 'false'}\n`; - if (referers && referers.length > 0) { - xml += ' \n'; - for (let i = 0; i < referers.length; i++) { - xml += ` ${referers[i]}\n`; - } - xml += ' \n'; - } else { - xml += ' \n'; - } - xml += ''; - params.content = xml; - params.mime = 'xml'; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; - -proto.getBucketReferer = async function getBucketReferer(name, options) { - checkBucketName(name); - const params = this._bucketRequestParams('GET', name, 'referer', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - let referers = result.data.RefererList.Referer || null; - if (referers) { - if (!Array.isArray(referers)) { - referers = [ referers ]; - } - } - return { - allowEmpty: result.data.AllowEmptyReferer === 'true', - referers, - res: result.res, - }; -}; - -proto.deleteBucketReferer = async function deleteBucketReferer(name, options) { - checkBucketName(name); - return await this.putBucketReferer(name, true, null, options); -}; - -// private apis - -proto._bucketRequestParams = function 
_bucketRequestParams(method, bucket, subres, options) { - return { - method, - bucket, - subres, - timeout: options && options.timeout, - ctx: options && options.ctx, - }; -}; diff --git a/lib/client.js b/lib/client.js deleted file mode 100644 index 12399efb6..000000000 --- a/lib/client.js +++ /dev/null @@ -1,353 +0,0 @@ -const debug = require('util').debuglog('oss-client:client'); -const sendToWormhole = require('stream-wormhole'); -const xml = require('xml2js'); -const merge = require('merge-descriptors'); -const utility = require('utility'); -const urllib = require('urllib'); -const pkg = require('../package.json'); -const signUtils = require('./common/signUtils'); -const _initOptions = require('./common/client/initOptions'); -const { createRequest } = require('./common/utils/createRequest'); -const { encoder } = require('./common/utils/encoder'); -const { getReqUrl } = require('./common/client/getReqUrl'); -const { setSTSToken } = require('./common/utils/setSTSToken'); -const { retry } = require('./common/utils/retry'); -const { isFunction } = require('./common/utils/isFunction'); - -function Client(options, ctx) { - if (!(this instanceof Client)) { - return new Client(options, ctx); - } - - if (options && options.inited) { - this.options = options; - } else { - this.options = Client.initOptions(options); - } - - // support custom agent and urllib client - if (this.options.urllib) { - this.urllib = this.options.urllib; - } else { - this.urllib = urllib; - } - this.ctx = ctx; - this.userAgent = this._getUserAgent(); - this.stsTokenFreshTime = new Date(); -} - -/** - * Expose `Client` - */ - -module.exports = Client; - -Client.initOptions = function initOptions(options) { - return _initOptions(options); -}; - -/** - * prototype - */ - -const proto = Client.prototype; - -/** - * Object operations - */ -merge(proto, require('./common/object')); -merge(proto, require('./object')); -merge(proto, require('./common/image')); -/** - * Bucket operations - */ 
-merge(proto, require('./common/bucket')); -merge(proto, require('./bucket')); -// multipart upload -merge(proto, require('./managed-upload')); -/** - * RTMP operations - */ -merge(proto, require('./rtmp')); - -/** - * common multipart-copy - */ -merge(proto, require('./common/multipart-copy')); -/** - * Common module parallel - */ -merge(proto, require('./common/parallel')); -/** - * Multipart operations - */ -merge(proto, require('./common/multipart')); -/** - * ImageClient class - */ -Client.ImageClient = require('./image')(Client); -/** - * Cluster Client class - */ -Client.ClusterClient = require('./cluster')(Client); - -/** - * STS Client class - */ -Client.STS = require('./sts'); - -/** - * get OSS signature - * @param {String} stringToSign stringToSign - * @return {String} the signature - */ -proto.signature = function signature(stringToSign) { - debug('authorization stringToSign: %s', stringToSign); - - return signUtils.computeSignature(this.options.accessKeySecret, stringToSign, this.options.headerEncoding); -}; - -proto._getReqUrl = getReqUrl; - -/** - * get author header - * - * "Authorization: OSS " + Access Key Id + ":" + Signature - * - * Signature = base64(hmac-sha1(Access Key Secret + "\n" - * + VERB + "\n" - * + CONTENT-MD5 + "\n" - * + CONTENT-TYPE + "\n" - * + DATE + "\n" - * + CanonicalizedOSSHeaders - * + CanonicalizedResource)) - * - * @param {String} method - * @param {String} resource - * @param {Object} header - * @return {String} - * - * @private - */ - -proto.authorization = function authorization(method, resource, subres, headers) { - const stringToSign = signUtils.buildCanonicalString(method.toUpperCase(), resource, { - headers, - parameters: subres, - }); - - return signUtils.authorization( - this.options.accessKeyId, - this.options.accessKeySecret, - stringToSign, - this.options.headerEncoding - ); -}; - -/** - * request oss server - * @param {Object} params - * - {String} object - * - {String} bucket - * - {Object} [headers] - * - 
{Object} [query] - * - {Buffer} [content] - * - {Stream} [stream] - * - {Stream} [writeStream] - * - {String} [mime] - * - {Boolean} [xmlResponse] - * - {Boolean} [customResponse] - * - {Number} [timeout] - * - {Object} [ctx] request context, default is `this.ctx` - * - * @private - */ - -proto.request = async function(params) { - if (this.options.retryMax) { - return await retry(request.bind(this), this.options.retryMax, { - errorHandler: err => { - const _errHandle = _err => { - if (params.stream) return false; - const statusErr = [ -1, -2 ].includes(_err.status); - const requestErrorRetryHandle = this.options.requestErrorRetryHandle || (() => true); - return statusErr && requestErrorRetryHandle(_err); - }; - if (_errHandle(err)) return true; - return false; - }, - })(params); - } - return await request.call(this, params); - -}; - -async function request(params) { - if (this.options.stsToken && isFunction(this.options.refreshSTSToken)) { - await setSTSToken.call(this); - } - const reqParams = createRequest.call(this, params); - let result; - let reqErr; - // try ctx.httpclient first - const urllib = reqParams.params.ctx?.httpclient ?? reqParams.params.ctx?.urllib ?? 
this.urllib; - try { - result = await urllib.request(reqParams.url, reqParams.params); - debug('response %s %s, got %s, headers: %j', params.method, reqParams.url, result.status, result.headers); - } catch (err) { - reqErr = err; - } - let err; - if (result && params.successStatuses && params.successStatuses.indexOf(result.status) === -1) { - err = await this.requestError(result); - err.params = params; - } else if (reqErr) { - err = await this.requestError(reqErr); - } - - if (err) { - if (params.customResponse && result && result.res) { - // consume the response stream - await sendToWormhole(result.res); - } - - if (err.name === 'ResponseTimeoutError') { - err.message = `${err.message.split(',')[0]}, please increase the timeout or use multipartDownload.`; - } - throw err; - } - - if (params.xmlResponse) { - result.data = await this.parseXML(result.data); - } - return result; -} - -proto._getResource = function _getResource(params) { - let resource = '/'; - if (params.bucket) resource += `${params.bucket}/`; - if (params.object) resource += encoder(params.object, this.options.headerEncoding); - - return resource; -}; - -proto._escape = function _escape(name) { - return utility.encodeURIComponent(name).replace(/%2F/g, '/'); -}; - -/* - * Get User-Agent for node.js - * @example - * oss-client/1.0.0 Node.js/5.3.0 (darwin; arm64) - */ - -proto._getUserAgent = function _getUserAgent() { - const sdk = `${pkg.name}/${pkg.version}`; - const platform = `Node.js/${process.version.slice(1)} (${process.platform}; ${process.arch})`; - return `${sdk} ${platform}`; -}; - -/** - * thunkify xml.parseString - * @param {String|Buffer} str - * - * @private - */ - -proto.parseXML = function parseXMLThunk(str) { - return new Promise((resolve, reject) => { - if (Buffer.isBuffer(str)) { - str = str.toString(); - } - xml.parseString( - str, - { - explicitRoot: false, - explicitArray: false, - }, - (err, result) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - 
}); -}; - -/** - * generater a request error with request response - * @param {Object} result - * - * @private - */ - -proto.requestError = async function requestError(result) { - let err = null; - if (result.name === 'ResponseTimeoutError') { - err = new Error(result.message); - err.name = result.name; - } else if (!result.data || !result.data.length) { - if (result.status === -1 || result.status === -2) { - // -1 is net error , -2 is timeout - err = new Error(result.message); - err.name = result.name; - err.status = result.status; - // try to use raw code first - err.code = result.code || result.name; - } else { - // HEAD not exists resource - if (result.status === 404) { - err = new Error('Object not exists'); - err.name = 'NoSuchKeyError'; - err.status = 404; - err.code = 'NoSuchKey'; - } else if (result.status === 412) { - err = new Error('Pre condition failed'); - err.name = 'PreconditionFailedError'; - err.status = 412; - err.code = 'PreconditionFailed'; - } else { - err = new Error(`Unknow error, status: ${result.status}, raw error: ${result}`); - err.name = 'UnknownError'; - err.status = result.status; - } - err.requestId = result.headers?.['x-oss-request-id']; - err.host = ''; - } - } else { - const message = String(result.data); - debug('request response error data: %s', message); - - let info; - try { - info = (await this.parseXML(message)) || {}; - } catch (error) { - debug(message); - error.message += `\nraw xml: ${message}`; - error.status = result.status; - error.requestId = result.headers['x-oss-request-id']; - return error; - } - - let msg = info.Message || `unknow request error, status: ${result.status}`; - if (info.Condition) { - msg += ` (condition: ${info.Condition})`; - } - err = new Error(msg); - err.name = info.Code ? 
`${info.Code}Error` : 'UnknownError'; - err.status = result.status; - err.code = info.Code; - err.requestId = info.RequestId; - err.hostId = info.HostId; - } - - debug('generate error %j', err); - return err; -}; - -proto.setSLDEnabled = function setSLDEnabled(enable) { - this.options.sldEnable = !!enable; - return this; -}; diff --git a/lib/cluster.js b/lib/cluster.js deleted file mode 100644 index 459e620db..000000000 --- a/lib/cluster.js +++ /dev/null @@ -1,233 +0,0 @@ -const Base = require('sdk-base'); -const copy = require('copy-to'); - -const RR = 'roundRobin'; -const MS = 'masterSlave'; - -module.exports = function(OssClient) { - class Client extends Base { - constructor(options) { - if (!options || !Array.isArray(options.cluster)) { - throw new Error('require options.cluster to be an array'); - } - super(options); - - this.clients = []; - this.availables = {}; - - for (let i = 0; i < options.cluster.length; i++) { - const opt = options.cluster[i]; - copy(options).pick('timeout', 'agent', 'urllib').to(opt); - this.clients.push(new OssClient(opt)); - this.availables[i] = true; - } - - this.schedule = options.schedule || RR; - // only read from master, default is false - this.masterOnly = !!options.masterOnly; - this.index = 0; - - const heartbeatInterval = options.heartbeatInterval || 10000; - this._checkAvailableLock = false; - this._timerId = this._deferInterval(this._checkAvailable.bind(this, true), heartbeatInterval); - this._ignoreStatusFile = options.ignoreStatusFile || false; - this._currentIP = require('address').ip(); - this._init(); - } - } - - const proto = Client.prototype; - - const GET_METHODS = [ - 'head', - 'get', - 'getStream', - 'list', - 'getACL', - ]; - - const PUT_METHODS = [ - 'put', - 'putStream', - 'delete', - 'deleteMulti', - 'copy', - 'putMeta', - 'putACL', - ]; - - GET_METHODS.forEach(method => { - proto[method] = async function(...args) { - const client = this.chooseAvailable(); - let lastError; - try { - return await 
client[method](...args); - } catch (err) { - if (err.status && err.status >= 200 && err.status < 500) { - // 200 ~ 499 belong to normal response, don't try again - throw err; - } - // < 200 || >= 500 need to retry from other cluser node - lastError = err; - } - - for (let i = 0; i < this.clients.length; i++) { - const c = this.clients[i]; - if (c !== client) { - try { - return await c[method].apply(client, args); - } catch (err) { - if (err.status && err.status >= 200 && err.status < 500) { - // 200 ~ 499 belong to normal response, don't try again - throw err; - } - // < 200 || >= 500 need to retry from other cluser node - lastError = err; - } - } - } - - lastError.message += ' (all clients are down)'; - throw lastError; - }; - }); - - // must cluster node write success - PUT_METHODS.forEach(method => { - proto[method] = async function(...args) { - const res = await Promise.all(this.clients.map(client => client[method](...args))); - return res[0]; - }; - }); - - proto.signatureUrl = function signatureUrl(/* name */...args) { - const client = this.chooseAvailable(); - return client.signatureUrl(...args); - }; - - proto.getObjectUrl = function getObjectUrl(/* name, baseUrl */...args) { - const client = this.chooseAvailable(); - return client.getObjectUrl(...args); - }; - - proto._init = function _init() { - const that = this; - (async () => { - await that._checkAvailable(that._ignoreStatusFile); - that.ready(true); - })().catch(err => { - that.emit('error', err); - }); - }; - - proto._checkAvailable = async function _checkAvailable(ignoreStatusFile) { - const name = `._ali-oss/check.status.${this._currentIP}.txt`; - if (!ignoreStatusFile) { - // only start will try to write the file - await this.put(name, Buffer.from(`check available started at ${Date()}`)); - } - - if (this._checkAvailableLock) { - return; - } - this._checkAvailableLock = true; - const downStatusFiles = []; - for (let i = 0; i < this.clients.length; i++) { - const client = this.clients[i]; - // 
check 3 times - let available = await this._checkStatus(client, name); - if (!available) { - // check again - available = await this._checkStatus(client, name); - } - if (!available) { - // check again - /* eslint no-await-in-loop: [0] */ - available = await this._checkStatus(client, name); - if (!available) { - downStatusFiles.push(client._objectUrl(name)); - } - } - this.availables[i] = available; - } - this._checkAvailableLock = false; - - if (downStatusFiles.length > 0) { - const err = new Error(`${downStatusFiles.length} data node down, please check status file: ${downStatusFiles.join(', ')}`); - err.name = 'CheckAvailableError'; - this.emit('error', err); - } - }; - - proto._checkStatus = async function _checkStatus(client, name) { - let available = true; - try { - await client.head(name); - } catch (err) { - // 404 will be available too - if (!err.status || err.status >= 500 || err.status < 200) { - available = false; - } - } - return available; - }; - - proto.chooseAvailable = function chooseAvailable() { - if (this.schedule === MS) { - // only read from master - if (this.masterOnly) { - return this.clients[0]; - } - for (let i = 0; i < this.clients.length; i++) { - if (this.availables[i]) { - return this.clients[i]; - } - } - // all down, try to use this first one - return this.clients[0]; - } - - // RR - let n = this.clients.length; - while (n > 0) { - const i = this._nextRRIndex(); - if (this.availables[i]) { - return this.clients[i]; - } - n--; - } - // all down, try to use this first one - return this.clients[0]; - }; - - proto._nextRRIndex = function _nextRRIndex() { - const index = this.index++; - if (this.index >= this.clients.length) { - this.index = 0; - } - return index; - }; - - proto._error = function error(err) { - if (err) throw err; - }; - - proto._createCallback = function _createCallback(ctx, gen, cb) { - return () => { - cb = cb || this._error; - gen.call(ctx).then(() => { - cb(); - }, cb); - }; - }; - proto._deferInterval = function 
_deferInterval(gen, timeout, cb) { - return setInterval(this._createCallback(this, gen, cb), timeout); - }; - - proto.close = function close() { - clearInterval(this._timerId); - this._timerId = null; - }; - - return Client; -}; diff --git a/lib/common/bucket/abortBucketWorm.js b/lib/common/bucket/abortBucketWorm.js deleted file mode 100644 index 413d4fd0e..000000000 --- a/lib/common/bucket/abortBucketWorm.js +++ /dev/null @@ -1,13 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); - -async function abortBucketWorm(name, options) { - checkBucketName(name); - const params = this._bucketRequestParams('DELETE', name, 'worm', options); - const result = await this.request(params); - return { - res: result.res, - status: result.status, - }; -} - -exports.abortBucketWorm = abortBucketWorm; diff --git a/lib/common/bucket/completeBucketWorm.js b/lib/common/bucket/completeBucketWorm.js deleted file mode 100644 index 443f7b154..000000000 --- a/lib/common/bucket/completeBucketWorm.js +++ /dev/null @@ -1,13 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); - -async function completeBucketWorm(name, wormId, options) { - checkBucketName(name); - const params = this._bucketRequestParams('POST', name, { wormId }, options); - const result = await this.request(params); - return { - res: result.res, - status: result.status, - }; -} - -exports.completeBucketWorm = completeBucketWorm; diff --git a/lib/common/bucket/deleteBucketEncryption.js b/lib/common/bucket/deleteBucketEncryption.js deleted file mode 100644 index d74a86298..000000000 --- a/lib/common/bucket/deleteBucketEncryption.js +++ /dev/null @@ -1,19 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; - -/** - * deleteBucketEncryption - * @param {String} bucketName - bucket name - */ -proto.deleteBucketEncryption = async function deleteBucketEncryption(bucketName) { - _checkBucketName(bucketName); - const params = 
this._bucketRequestParams('DELETE', bucketName, 'encryption'); - params.successStatuses = [ 204 ]; - params.xmlResponse = true; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/deleteBucketInventory.js b/lib/common/bucket/deleteBucketInventory.js deleted file mode 100644 index d2cb362e8..000000000 --- a/lib/common/bucket/deleteBucketInventory.js +++ /dev/null @@ -1,20 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); - -/** - * deleteBucketInventory - * @param {String} bucketName - bucket name - * @param {String} inventoryId - inventory id - * @param {Object} options - options - */ -async function deleteBucketInventory(bucketName, inventoryId, options = {}) { - const subres = Object.assign({ inventory: '', inventoryId }, options.subres); - checkBucketName(bucketName); - const params = this._bucketRequestParams('DELETE', bucketName, subres, options); - params.successStatuses = [ 204 ]; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -} -exports.deleteBucketInventory = deleteBucketInventory; diff --git a/lib/common/bucket/deleteBucketLifecycle.js b/lib/common/bucket/deleteBucketLifecycle.js deleted file mode 100644 index 8d96683b0..000000000 --- a/lib/common/bucket/deleteBucketLifecycle.js +++ /dev/null @@ -1,13 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; - -proto.deleteBucketLifecycle = async function deleteBucketLifecycle(name, options) { - _checkBucketName(name); - const params = this._bucketRequestParams('DELETE', name, 'lifecycle', options); - params.successStatuses = [ 204 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; diff --git a/lib/common/bucket/deleteBucketPolicy.js b/lib/common/bucket/deleteBucketPolicy.js deleted file mode 100644 index ecd93c419..000000000 --- 
a/lib/common/bucket/deleteBucketPolicy.js +++ /dev/null @@ -1,21 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; -/** - * deleteBucketPolicy - * @param {String} bucketName - bucket name - * @param {Object} options - */ - -proto.deleteBucketPolicy = async function deleteBucketPolicy(bucketName, options = {}) { - _checkBucketName(bucketName); - - const params = this._bucketRequestParams('DELETE', bucketName, 'policy', options); - params.successStatuses = [ 204 ]; - const result = await this.request(params); - - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/deleteBucketTags.js b/lib/common/bucket/deleteBucketTags.js deleted file mode 100644 index 657fffed9..000000000 --- a/lib/common/bucket/deleteBucketTags.js +++ /dev/null @@ -1,21 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; -/** - * deleteBucketTags - * @param {String} name - bucket name - * @param {Object} options - */ - -proto.deleteBucketTags = async function deleteBucketTags(name, options = {}) { - _checkBucketName(name); - - const params = this._bucketRequestParams('DELETE', name, 'tagging', options); - params.successStatuses = [ 204 ]; - const result = await this.request(params); - - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/deleteBucketWebsite.js b/lib/common/bucket/deleteBucketWebsite.js deleted file mode 100644 index e8ea12fb3..000000000 --- a/lib/common/bucket/deleteBucketWebsite.js +++ /dev/null @@ -1,13 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; - -proto.deleteBucketWebsite = async function deleteBucketWebsite(name, options) { - _checkBucketName(name); - const params = this._bucketRequestParams('DELETE', name, 'website', options); - params.successStatuses = [ 204 ]; - const result = await 
this.request(params); - return { - res: result.res, - }; -}; diff --git a/lib/common/bucket/extendBucketWorm.js b/lib/common/bucket/extendBucketWorm.js deleted file mode 100644 index ce1a89d3e..000000000 --- a/lib/common/bucket/extendBucketWorm.js +++ /dev/null @@ -1,22 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -async function extendBucketWorm(name, wormId, days, options) { - checkBucketName(name); - const params = this._bucketRequestParams('POST', name, { wormExtend: '', wormId }, options); - const paramlXMLObJ = { - ExtendWormConfiguration: { - RetentionPeriodInDays: days, - }, - }; - params.mime = 'xml'; - params.content = obj2xml(paramlXMLObJ, { headers: true }); - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - res: result.res, - status: result.status, - }; -} - -exports.extendBucketWorm = extendBucketWorm; diff --git a/lib/common/bucket/getBucketEncryption.js b/lib/common/bucket/getBucketEncryption.js deleted file mode 100644 index 3ac646254..000000000 --- a/lib/common/bucket/getBucketEncryption.js +++ /dev/null @@ -1,21 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; -/** - * getBucketEncryption - * @param {String} bucketName - bucket name - */ - -proto.getBucketEncryption = async function getBucketEncryption(bucketName) { - _checkBucketName(bucketName); - const params = this._bucketRequestParams('GET', bucketName, 'encryption'); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - const encryption = result.data.ApplyServerSideEncryptionByDefault; - return { - encryption, - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/getBucketInventory.js b/lib/common/bucket/getBucketInventory.js deleted file mode 100644 index 55c9ad0ab..000000000 --- 
a/lib/common/bucket/getBucketInventory.js +++ /dev/null @@ -1,23 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); -const { formatInventoryConfig } = require('../utils/formatInventoryConfig'); - -/** - * getBucketInventory - * @param {String} bucketName - bucket name - * @param {String} inventoryId - inventory id - * @param {Object} options - options - */ -async function getBucketInventory(bucketName, inventoryId, options = {}) { - const subres = Object.assign({ inventory: '', inventoryId }, options.subres); - checkBucketName(bucketName); - const params = this._bucketRequestParams('GET', bucketName, subres, options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - inventory: formatInventoryConfig(result.data), - }; -} -exports.getBucketInventory = getBucketInventory; diff --git a/lib/common/bucket/getBucketLifecycle.js b/lib/common/bucket/getBucketLifecycle.js deleted file mode 100644 index 4293acefb..000000000 --- a/lib/common/bucket/getBucketLifecycle.js +++ /dev/null @@ -1,33 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { formatObjKey } = require('../utils/formatObjKey'); - -const proto = exports; - -proto.getBucketLifecycle = async function getBucketLifecycle(name, options) { - _checkBucketName(name); - const params = this._bucketRequestParams('GET', name, 'lifecycle', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - let rules = result.data.Rule || null; - if (rules) { - if (!Array.isArray(rules)) { - rules = [ rules ]; - } - rules = rules.map(_ => { - if (_.ID) { - _.id = _.ID; - delete _.ID; - } - if (_.Tag && !Array.isArray(_.Tag)) { - _.Tag = [ _.Tag ]; - } - return formatObjKey(_, 'firstLowerCase'); - }); - } - return { - rules, - res: result.res, - }; -}; - diff --git 
a/lib/common/bucket/getBucketPolicy.js b/lib/common/bucket/getBucketPolicy.js deleted file mode 100644 index 03375c74f..000000000 --- a/lib/common/bucket/getBucketPolicy.js +++ /dev/null @@ -1,28 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; -/** - * getBucketPolicy - * @param {String} bucketName - bucket name - * @param {Object} options - */ - -proto.getBucketPolicy = async function getBucketPolicy(bucketName, options = {}) { - _checkBucketName(bucketName); - - const params = this._bucketRequestParams('GET', bucketName, 'policy', options); - - const result = await this.request(params); - params.successStatuses = [ 200 ]; - let policy = null; - - if (result.res.status === 200) { - policy = JSON.parse(result.res.data.toString()); - } - - return { - policy, - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/getBucketRequestPayment.js b/lib/common/bucket/getBucketRequestPayment.js deleted file mode 100644 index 189bfac84..000000000 --- a/lib/common/bucket/getBucketRequestPayment.js +++ /dev/null @@ -1,25 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; -/** - * getBucketRequestPayment - * @param {String} bucketName - bucket name - * @param {Object} options - */ - -proto.getBucketRequestPayment = async function getBucketRequestPayment(bucketName, options) { - options = options || {}; - - _checkBucketName(bucketName); - const params = this._bucketRequestParams('GET', bucketName, 'requestPayment', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - - const result = await this.request(params); - - return { - status: result.status, - res: result.res, - payer: result.data.Payer, - }; -}; diff --git a/lib/common/bucket/getBucketStat.d.ts b/lib/common/bucket/getBucketStat.d.ts deleted file mode 100644 index 0e71a3e77..000000000 --- a/lib/common/bucket/getBucketStat.d.ts +++ 
/dev/null @@ -1,23 +0,0 @@ -declare type bucketStatRes = { - Storage: string; - ObjectCount: string; - MultipartUploadCount: string; - LiveChannelCount: string; - LastModifiedTime: string; - StandardStorage: string; - StandardObjectCount: string; - InfrequentAccessStorage: string; - InfrequentAccessRealStorage: string; - InfrequentAccessObjectCount: string; - ArchiveStorage: string; - ArchiveRealStorage: string; - ArchiveObjectCount: string; - ColdArchiveStorage: string; - ColdArchiveRealStorage: string; - ColdArchiveObjectCount: string; -}; -export declare function getBucketStat(this: any, name: string, options: {}): Promise<{ - res: any; - stat: bucketStatRes; -}>; -export {}; diff --git a/lib/common/bucket/getBucketStat.js b/lib/common/bucket/getBucketStat.js deleted file mode 100644 index 7d14e3429..000000000 --- a/lib/common/bucket/getBucketStat.js +++ /dev/null @@ -1,16 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); - -async function getBucketStat(name, options) { - name = name || this.options.bucket; - checkBucketName(name); - const params = this._bucketRequestParams('GET', name, 'stat', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - return { - res: result.res, - stat: result.data, - }; -} - -exports.getBucketStat = getBucketStat; diff --git a/lib/common/bucket/getBucketTags.js b/lib/common/bucket/getBucketTags.js deleted file mode 100644 index 23aa5245e..000000000 --- a/lib/common/bucket/getBucketTags.js +++ /dev/null @@ -1,24 +0,0 @@ -const proto = exports; -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { formatTag } = require('../utils/formatTag'); -/** - * getBucketTags - * @param {String} name - bucket name - * @param {Object} options - * @return {Object} - */ - -proto.getBucketTags = async function getBucketTags(name, options = {}) { - _checkBucketName(name); - const params = this._bucketRequestParams('GET', 
name, 'tagging', options); - params.successStatuses = [ 200 ]; - const result = await this.request(params); - const Tagging = await this.parseXML(result.data); - - - return { - status: result.status, - res: result.res, - tag: formatTag(Tagging), - }; -}; diff --git a/lib/common/bucket/getBucketVersioning.js b/lib/common/bucket/getBucketVersioning.js deleted file mode 100644 index 5c40780b7..000000000 --- a/lib/common/bucket/getBucketVersioning.js +++ /dev/null @@ -1,22 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; -/** - * getBucketVersioning - * @param {String} bucketName - bucket name - */ - -proto.getBucketVersioning = async function getBucketVersioning(bucketName, options) { - _checkBucketName(bucketName); - const params = this._bucketRequestParams('GET', bucketName, 'versioning', options); - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - - const versionStatus = result.data.Status; - return { - status: result.status, - versionStatus, - res: result.res, - }; -}; diff --git a/lib/common/bucket/getBucketWebsite.js b/lib/common/bucket/getBucketWebsite.js deleted file mode 100644 index 5869ee9d4..000000000 --- a/lib/common/bucket/getBucketWebsite.js +++ /dev/null @@ -1,28 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { isObject } = require('../utils/isObject'); - -const proto = exports; - -proto.getBucketWebsite = async function getBucketWebsite(name, options) { - _checkBucketName(name); - const params = this._bucketRequestParams('GET', name, 'website', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - let routingRules = []; - if (result.data.RoutingRules && result.data.RoutingRules.RoutingRule) { - if (isObject(result.data.RoutingRules.RoutingRule)) { - routingRules = [ result.data.RoutingRules.RoutingRule ]; 
- } else { - routingRules = result.data.RoutingRules.RoutingRule; - } - } - return { - index: (result.data.IndexDocument && result.data.IndexDocument.Suffix) || '', - supportSubDir: (result.data.IndexDocument && result.data.IndexDocument.SupportSubDir) || 'false', - type: (result.data.IndexDocument && result.data.IndexDocument.Type), - routingRules, - error: (result.data.ErrorDocument && result.data.ErrorDocument.Key) || null, - res: result.res, - }; -}; diff --git a/lib/common/bucket/getBucketWorm.js b/lib/common/bucket/getBucketWorm.js deleted file mode 100644 index 504b2798a..000000000 --- a/lib/common/bucket/getBucketWorm.js +++ /dev/null @@ -1,19 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); -const { dataFix } = require('../utils/dataFix'); - -async function getBucketWorm(name, options) { - checkBucketName(name); - const params = this._bucketRequestParams('GET', name, 'worm', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - dataFix(result.data, { - lowerFirst: true, - rename: { - RetentionPeriodInDays: 'days', - }, - }); - return Object.assign(Object.assign({}, result.data), { res: result.res, status: result.status }); -} - -exports.getBucketWorm = getBucketWorm; diff --git a/lib/common/bucket/index.js b/lib/common/bucket/index.js deleted file mode 100644 index 3e394e079..000000000 --- a/lib/common/bucket/index.js +++ /dev/null @@ -1,34 +0,0 @@ -const merge = require('merge-descriptors'); - -const proto = exports; - -merge(proto, require('./getBucketRequestPayment')); -merge(proto, require('./putBucketRequestPayment')); -merge(proto, require('./putBucketEncryption')); -merge(proto, require('./getBucketEncryption')); -merge(proto, require('./deleteBucketEncryption')); -merge(proto, require('./getBucketTags')); -merge(proto, require('./putBucketTags')); -merge(proto, require('./deleteBucketTags')); -merge(proto, require('./putBucket')); -merge(proto, 
require('./getBucketWebsite')); -merge(proto, require('./putBucketWebsite')); -merge(proto, require('./deleteBucketWebsite')); -merge(proto, require('./getBucketLifecycle')); -merge(proto, require('./putBucketLifecycle')); -merge(proto, require('./deleteBucketLifecycle')); -merge(proto, require('./getBucketPolicy')); -merge(proto, require('./putBucketPolicy')); -merge(proto, require('./deleteBucketPolicy')); -merge(proto, require('./getBucketVersioning')); -merge(proto, require('./putBucketVersioning')); -merge(proto, require('./getBucketInventory')); -merge(proto, require('./deleteBucketInventory')); -merge(proto, require('./listBucketInventory')); -merge(proto, require('./putBucketInventory')); -merge(proto, require('./abortBucketWorm')); -merge(proto, require('./completeBucketWorm')); -merge(proto, require('./extendBucketWorm')); -merge(proto, require('./getBucketWorm')); -merge(proto, require('./initiateBucketWorm')); -merge(proto, require('./getBucketStat')); diff --git a/lib/common/bucket/initiateBucketWorm.d.ts b/lib/common/bucket/initiateBucketWorm.d.ts deleted file mode 100644 index c56703192..000000000 --- a/lib/common/bucket/initiateBucketWorm.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -export declare function initiateBucketWorm(this: any, name: string, days: string, options: any): Promise<{ - res: any; - wormId: any; - status: any; -}>; diff --git a/lib/common/bucket/initiateBucketWorm.js b/lib/common/bucket/initiateBucketWorm.js deleted file mode 100644 index 3fd66e25d..000000000 --- a/lib/common/bucket/initiateBucketWorm.js +++ /dev/null @@ -1,23 +0,0 @@ -const { obj2xml } = require('../utils/obj2xml'); -const { checkBucketName } = require('../utils/checkBucketName'); - -async function initiateBucketWorm(name, days, options) { - checkBucketName(name); - const params = this._bucketRequestParams('POST', name, 'worm', options); - const paramlXMLObJ = { - InitiateWormConfiguration: { - RetentionPeriodInDays: days, - }, - }; - params.mime = 'xml'; - params.content 
= obj2xml(paramlXMLObJ, { headers: true }); - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - res: result.res, - wormId: result.res.headers['x-oss-worm-id'], - status: result.status, - }; -} - -exports.initiateBucketWorm = initiateBucketWorm; diff --git a/lib/common/bucket/listBucketInventory.js b/lib/common/bucket/listBucketInventory.js deleted file mode 100644 index fc2c26031..000000000 --- a/lib/common/bucket/listBucketInventory.js +++ /dev/null @@ -1,26 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); -const { formatInventoryConfig } = require('../utils/formatInventoryConfig'); - -/** - * listBucketInventory - * @param {String} bucketName - bucket name - * @param {Object} options - options - */ -async function listBucketInventory(bucketName, options = {}) { - const { continuationToken } = options; - const subres = Object.assign({ inventory: '' }, continuationToken && { 'continuation-token': continuationToken }, options.subres); - checkBucketName(bucketName); - const params = this._bucketRequestParams('GET', bucketName, subres, options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - const { data, res, status } = result; - return { - isTruncated: data.IsTruncated === 'true', - nextContinuationToken: data.NextContinuationToken, - inventoryList: formatInventoryConfig(data.InventoryConfiguration, true), - status, - res, - }; -} -exports.listBucketInventory = listBucketInventory; diff --git a/lib/common/bucket/putBucket.js b/lib/common/bucket/putBucket.js deleted file mode 100644 index 6282ac2b7..000000000 --- a/lib/common/bucket/putBucket.js +++ /dev/null @@ -1,31 +0,0 @@ -const proto = exports; -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -proto.putBucket = async function putBucket(name, options = {}) { - _checkBucketName(name, true); - const 
params = this._bucketRequestParams('PUT', name, '', options); - - const CreateBucketConfiguration = {}; - const paramlXMLObJ = { - CreateBucketConfiguration, - }; - - const storageClass = options.StorageClass || options.storageClass; - const dataRedundancyType = options.DataRedundancyType || options.dataRedundancyType; - if (storageClass || dataRedundancyType) { - storageClass && (CreateBucketConfiguration.StorageClass = storageClass); - dataRedundancyType && (CreateBucketConfiguration.DataRedundancyType = dataRedundancyType); - params.mime = 'xml'; - params.content = obj2xml(paramlXMLObJ, { headers: true }); - } - const { acl, headers = {} } = options; - acl && (headers['x-oss-acl'] = acl); - params.headers = headers; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - bucket: (result.headers.location && result.headers.location.substring(1)) || null, - res: result.res, - }; -}; diff --git a/lib/common/bucket/putBucketEncryption.js b/lib/common/bucket/putBucketEncryption.js deleted file mode 100644 index 4a0a0aba9..000000000 --- a/lib/common/bucket/putBucketEncryption.js +++ /dev/null @@ -1,35 +0,0 @@ -const proto = exports; -const { checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -/** - * putBucketEncryption - * @param {String} bucketName - bucket name - * @param {Object} options - options - */ -proto.putBucketEncryption = async function putBucketEncryption(bucketName, options) { - options = options || {}; - checkBucketName(bucketName); - const params = this._bucketRequestParams('PUT', bucketName, 'encryption', options); - params.successStatuses = [ 200 ]; - const paramXMLObj = { - ServerSideEncryptionRule: { - ApplyServerSideEncryptionByDefault: { - SSEAlgorithm: options.SSEAlgorithm, - }, - }, - }; - if (options.KMSMasterKeyID !== undefined) { - paramXMLObj.ServerSideEncryptionRule.ApplyServerSideEncryptionByDefault.KMSMasterKeyID = options.KMSMasterKeyID; - } 
- const paramXML = obj2xml(paramXMLObj, { - headers: true, - }); - params.mime = 'xml'; - params.content = paramXML; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/putBucketInventory.d.ts b/lib/common/bucket/putBucketInventory.d.ts deleted file mode 100644 index 006faca52..000000000 --- a/lib/common/bucket/putBucketInventory.d.ts +++ /dev/null @@ -1,36 +0,0 @@ -declare type Field = 'Size | LastModifiedDate | ETag | StorageClass | IsMultipartUploaded | EncryptionStatus'; -interface Inventory { - id: string; - isEnabled: true | false; - prefix?: string; - OSSBucketDestination: { - format: 'CSV'; - accountId: string; - rolename: string; - bucket: string; - prefix?: string; - encryption?: { - 'SSE-OSS': ''; - } | { - 'SSE-KMS': { - keyId: string; - }; - }; - }; - frequency: 'Daily' | 'Weekly'; - includedObjectVersions: 'Current' | 'All'; - optionalFields?: { - field?: Field[]; - }; -} -/** - * putBucketInventory - * @param {String} bucketName - bucket name - * @param {Inventory} inventory - * @param {Object} options - */ -export declare function putBucketInventory(this: any, bucketName: string, inventory: Inventory, options?: any): Promise<{ - status: any; - res: any; -}>; -export {}; diff --git a/lib/common/bucket/putBucketInventory.js b/lib/common/bucket/putBucketInventory.js deleted file mode 100644 index c98c75b44..000000000 --- a/lib/common/bucket/putBucketInventory.js +++ /dev/null @@ -1,56 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -/** - * putBucketInventory - * @param {String} bucketName - bucket name - * @param {Inventory} inventory - inventory - * @param {Object} options - options - */ -async function putBucketInventory(bucketName, inventory, options = {}) { - const subres = Object.assign({ inventory: '', inventoryId: inventory.id }, options.subres); - checkBucketName(bucketName); - 
const { OSSBucketDestination, optionalFields, includedObjectVersions } = inventory; - const destinationBucketPrefix = 'acs:oss:::'; - const rolePrefix = `acs:ram::${OSSBucketDestination.accountId}:role/`; - const paramXMLObj = { - InventoryConfiguration: { - Id: inventory.id, - IsEnabled: inventory.isEnabled, - Filter: { - Prefix: inventory.prefix || '', - }, - Destination: { - OSSBucketDestination: { - Format: OSSBucketDestination.format, - AccountId: OSSBucketDestination.accountId, - RoleArn: `${rolePrefix}${OSSBucketDestination.rolename}`, - Bucket: `${destinationBucketPrefix}${OSSBucketDestination.bucket}`, - Prefix: OSSBucketDestination.prefix || '', - Encryption: OSSBucketDestination.encryption || '', - }, - }, - Schedule: { - Frequency: inventory.frequency, - }, - IncludedObjectVersions: includedObjectVersions, - OptionalFields: { - Field: (optionalFields === null || optionalFields === void 0 ? void 0 : optionalFields.field) || [], - }, - }, - }; - const paramXML = obj2xml(paramXMLObj, { - headers: true, - firstUpperCase: true, - }); - const params = this._bucketRequestParams('PUT', bucketName, subres, options); - params.successStatuses = [ 200 ]; - params.mime = 'xml'; - params.content = paramXML; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -} -exports.putBucketInventory = putBucketInventory; diff --git a/lib/common/bucket/putBucketLifecycle.js b/lib/common/bucket/putBucketLifecycle.js deleted file mode 100644 index dc973a12f..000000000 --- a/lib/common/bucket/putBucketLifecycle.js +++ /dev/null @@ -1,125 +0,0 @@ -/* eslint-disable no-use-before-define */ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { deepCopy } = require('../utils/deepCopy'); -const { isObject } = require('../utils/isObject'); -const { obj2xml } = require('../utils/obj2xml'); -const { checkObjectTag } = require('../utils/checkObjectTag'); -const { getStrBytesCount } = 
require('../utils/getStrBytesCount'); - -const proto = exports; - - -proto.putBucketLifecycle = async function putBucketLifecycle(name, rules, options) { - _checkBucketName(name); - - if (!Array.isArray(rules)) { - throw new Error('rules must be Array'); - } - - const params = this._bucketRequestParams('PUT', name, 'lifecycle', options); - const Rule = []; - const paramXMLObj = { - LifecycleConfiguration: { - Rule, - }, - }; - - rules.forEach(_ => { - defaultDaysAndDate2Expiration(_); // todo delete, 兼容旧版本 - checkRule(_); - if (_.id) { - _.ID = _.id; - delete _.id; - } - Rule.push(_); - }); - - const paramXML = obj2xml(paramXMLObj, { - headers: true, - firstUpperCase: true, - }); - - params.content = paramXML; - params.mime = 'xml'; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; - -// todo delete, 兼容旧版本 -function defaultDaysAndDate2Expiration(obj) { - if (obj.days) { - obj.expiration = { - days: obj.days, - }; - } - if (obj.date) { - obj.expiration = { - createdBeforeDate: obj.date, - }; - } -} - -function checkDaysAndDate(obj, key) { - const { days, createdBeforeDate } = obj; - if (!days && !createdBeforeDate) { - throw new Error(`${key} must includes days or createdBeforeDate`); - } else if (days && !/^[1-9][0-9]*$/.test(days)) { - throw new Error('days must be a positive integer'); - } else if (createdBeforeDate && !/\d{4}-\d{2}-\d{2}T00:00:00.000Z/.test(createdBeforeDate)) { - throw new Error('createdBeforeDate must be date and conform to iso8601 format'); - } -} - -function handleCheckTag(tag) { - if (!Array.isArray(tag) && !isObject(tag)) { - throw new Error('tag must be Object or Array'); - } - tag = isObject(tag) ? 
[ tag ] : tag; - const tagObj = {}; - const tagClone = deepCopy(tag); - tagClone.forEach(v => { - tagObj[v.key] = v.value; - }); - - checkObjectTag(tagObj); -} - -function checkRule(rule) { - if (rule.id && getStrBytesCount(rule.id) > 255) throw new Error('ID is composed of 255 bytes at most'); - - if (rule.prefix === undefined) throw new Error('Rule must includes prefix'); - - if (![ 'Enabled', 'Disabled' ].includes(rule.status)) throw new Error('Status must be Enabled or Disabled'); - - if (rule.transition) { - if (![ 'IA', 'Archive' ].includes(rule.transition.storageClass)) throw new Error('StorageClass must be IA or Archive'); - checkDaysAndDate(rule.transition, 'Transition'); - } - - if (rule.expiration) { - if (!rule.expiration.expiredObjectDeleteMarker) { - checkDaysAndDate(rule.expiration, 'Expiration'); - } else if (rule.expiration.days || rule.expiration.createdBeforeDate) { - throw new Error('expiredObjectDeleteMarker cannot be used with days or createdBeforeDate'); - } - } - - if (rule.abortMultipartUpload) { - checkDaysAndDate(rule.abortMultipartUpload, 'AbortMultipartUpload'); - } - - if (!rule.expiration && !rule.abortMultipartUpload && !rule.transition && !rule.noncurrentVersionTransition) { - throw new Error('Rule must includes expiration or abortMultipartUpload or transition or noncurrentVersionTransition'); - } - - if (rule.tag) { - if (rule.abortMultipartUpload) { - throw new Error('Tag cannot be used with abortMultipartUpload'); - } - handleCheckTag(rule.tag); - } -} - diff --git a/lib/common/bucket/putBucketPolicy.js b/lib/common/bucket/putBucketPolicy.js deleted file mode 100644 index eb40fe076..000000000 --- a/lib/common/bucket/putBucketPolicy.js +++ /dev/null @@ -1,27 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { policy2Str } = require('../utils/policy2Str'); -const { isObject } = require('../utils/isObject'); - -const proto = exports; -/** - * putBucketPolicy - * @param {String} 
bucketName - bucket name - * @param {Object} policy - bucket policy - * @param {Object} options - */ - -proto.putBucketPolicy = async function putBucketPolicy(bucketName, policy, options = {}) { - _checkBucketName(bucketName); - - if (!isObject(policy)) { - throw new Error('policy is not Object'); - } - const params = this._bucketRequestParams('PUT', bucketName, 'policy', options); - params.content = policy2Str(policy); - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/putBucketRequestPayment.js b/lib/common/bucket/putBucketRequestPayment.js deleted file mode 100644 index 0262ae3e4..000000000 --- a/lib/common/bucket/putBucketRequestPayment.js +++ /dev/null @@ -1,49 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -const proto = exports; -/** - * putBucketRequestPayment - * @param {String} bucketName - * @param {String} payer - * @param {Object} options - */ -const payerAll = [ 'BucketOwner', 'Requester' ]; - -proto.putBucketRequestPayment = async function putBucketRequestPayment( - bucketName, - payer, - options -) { - options = options || {}; - if (!payer || payerAll.indexOf(payer) < 0) { - throw new Error('payer must be BucketOwner or Requester'); - } - - _checkBucketName(bucketName); - const params = this._bucketRequestParams( - 'PUT', - bucketName, - 'requestPayment', - options - ); - params.successStatuses = [ 200 ]; - - const paramXMLObj = { - RequestPaymentConfiguration: { - Payer: payer, - }, - }; - const paramXML = obj2xml(paramXMLObj, { - headers: true, - }); - - params.mime = 'xml'; - params.content = paramXML; - - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/putBucketTags.js b/lib/common/bucket/putBucketTags.js deleted file mode 100644 index 
3833aad5d..000000000 --- a/lib/common/bucket/putBucketTags.js +++ /dev/null @@ -1,39 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); -const { checkBucketTag } = require('../utils/checkBucketTag'); - -const proto = exports; -/** - * putBucketTags - * @param {String} name - bucket name - * @param {Object} tag - bucket tag, eg: `{a: "1", b: "2"}` - * @param {Object} options - */ - -proto.putBucketTags = async function putBucketTags(name, tag, options = {}) { - _checkBucketName(name); - checkBucketTag(tag); - const params = this._bucketRequestParams('PUT', name, 'tagging', options); - params.successStatuses = [ 200 ]; - tag = Object.keys(tag).map(key => ({ - Key: key, - Value: tag[key], - })); - - const paramXMLObj = { - Tagging: { - TagSet: { - Tag: tag, - }, - }, - }; - - params.mime = 'xml'; - params.content = obj2xml(paramXMLObj); - - const result = await this.request(params); - return { - res: result.res, - status: result.status, - }; -}; diff --git a/lib/common/bucket/putBucketVersioning.js b/lib/common/bucket/putBucketVersioning.js deleted file mode 100644 index fa19a8397..000000000 --- a/lib/common/bucket/putBucketVersioning.js +++ /dev/null @@ -1,35 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -const proto = exports; -/** - * putBucketVersioning - * @param {String} name - bucket name - * @param {String} status - * @param {Object} options - */ - -proto.putBucketVersioning = async function putBucketVersioning(name, status, options = {}) { - _checkBucketName(name); - if (![ 'Enabled', 'Suspended' ].includes(status)) { - throw new Error('status must be Enabled or Suspended'); - } - const params = this._bucketRequestParams('PUT', name, 'versioning', options); - - const paramXMLObj = { - VersioningConfiguration: { - Status: status, - }, - }; - - params.mime = 'xml'; - 
params.content = obj2xml(paramXMLObj, { - headers: true, - }); - - const result = await this.request(params); - return { - res: result.res, - status: result.status, - }; -}; diff --git a/lib/common/bucket/putBucketWebsite.js b/lib/common/bucket/putBucketWebsite.js deleted file mode 100644 index 93445216b..000000000 --- a/lib/common/bucket/putBucketWebsite.js +++ /dev/null @@ -1,49 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -const proto = exports; -proto.putBucketWebsite = async function putBucketWebsite(name, config = {}, options) { - _checkBucketName(name); - const params = this._bucketRequestParams('PUT', name, 'website', options); - const IndexDocument = { - Suffix: config.index || 'index.html', - }; - const WebsiteConfiguration = { - IndexDocument, - }; - let website = { - WebsiteConfiguration, - }; - - if (config.supportSubDir) { - IndexDocument.SupportSubDir = config.supportSubDir; - } - - if (config.type) { - IndexDocument.Type = config.type; - } - - if (config.error) { - WebsiteConfiguration.ErrorDocument = { - Key: config.error, - }; - } - - if (config.routingRules !== undefined) { - if (!Array.isArray(config.routingRules)) { - throw new Error('RoutingRules must be Array'); - } - WebsiteConfiguration.RoutingRules = { - RoutingRule: config.routingRules, - }; - } - - website = obj2xml(website); - params.content = website; - params.mime = 'xml'; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; diff --git a/lib/common/callback.js b/lib/common/callback.js deleted file mode 100644 index 52aae99ce..000000000 --- a/lib/common/callback.js +++ /dev/null @@ -1,29 +0,0 @@ - - -exports.encodeCallback = function encodeCallback(reqParams, options) { - reqParams.headers = reqParams.headers || {}; - if (!Object.prototype.hasOwnProperty.call(reqParams.headers, 'x-oss-callback')) { - if (options.callback) 
{ - const json = { - callbackUrl: encodeURI(options.callback.url), - callbackBody: options.callback.body, - }; - if (options.callback.host) { - json.callbackHost = options.callback.host; - } - if (options.callback.contentType) { - json.callbackBodyType = options.callback.contentType; - } - const callback = Buffer.from(JSON.stringify(json)).toString('base64'); - reqParams.headers['x-oss-callback'] = callback; - - if (options.callback.customValue) { - const callbackVar = {}; - Object.keys(options.callback.customValue).forEach(key => { - callbackVar[`x:${key}`] = options.callback.customValue[key].toString(); - }); - reqParams.headers['x-oss-callback-var'] = Buffer.from(JSON.stringify(callbackVar)).toString('base64'); - } - } - } -}; diff --git a/lib/common/client/getReqUrl.js b/lib/common/client/getReqUrl.js deleted file mode 100644 index bdae93c12..000000000 --- a/lib/common/client/getReqUrl.js +++ /dev/null @@ -1,46 +0,0 @@ -const copy = require('copy-to'); -const { format: urlformat } = require('url'); -const merge = require('merge-descriptors'); -const is = require('is-type-of'); -const isIP_1 = require('../utils/isIP'); -const { checkConfigValid } = require('../utils/checkConfigValid'); - -function getReqUrl(params) { - const ep = {}; - const isCname = this.options.cname; - checkConfigValid(this.options.endpoint, 'endpoint'); - copy(this.options.endpoint, false).to(ep); - if (params.bucket && !isCname && !isIP_1.isIP(ep.hostname) && !this.options.sldEnable) { - ep.host = `${params.bucket}.${ep.host}`; - } - let resourcePath = '/'; - if (params.bucket && (this.options.sldEnable)) { - resourcePath += `${params.bucket}/`; - } - if (params.object) { - // Preserve '/' in result url - resourcePath += this._escape(params.object).replace(/\+/g, '%2B'); - } - ep.pathname = resourcePath; - const query = {}; - if (params.query) { - merge(query, params.query); - } - if (params.subres) { - let subresAsQuery = {}; - if (is.string(params.subres)) { - 
subresAsQuery[params.subres] = ''; - } else if (is.array(params.subres)) { - params.subres.forEach(k => { - subresAsQuery[k] = ''; - }); - } else { - subresAsQuery = params.subres; - } - merge(query, subresAsQuery); - } - ep.query = query; - return urlformat(ep); -} - -exports.getReqUrl = getReqUrl; diff --git a/lib/common/client/initOptions.js b/lib/common/client/initOptions.js deleted file mode 100644 index 6c2306c38..000000000 --- a/lib/common/client/initOptions.js +++ /dev/null @@ -1,71 +0,0 @@ -const ms = require('humanize-ms'); -const urlutil = require('url'); -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { setRegion } = require('../utils/setRegion'); -const { checkConfigValid } = require('../utils/checkConfigValid'); - -function setEndpoint(endpoint, secure) { - checkConfigValid(endpoint, 'endpoint'); - let url = urlutil.parse(endpoint); - - if (!url.protocol) { - url = urlutil.parse(`http${secure ? 's' : ''}://${endpoint}`); - } - - if (url.protocol !== 'http:' && url.protocol !== 'https:') { - throw new Error('Endpoint protocol must be http or https.'); - } - - return url; -} - -module.exports = function(options) { - if (!options || !options.accessKeyId || !options.accessKeySecret) { - throw new Error('require accessKeyId, accessKeySecret'); - } - if (options.stsToken && !options.refreshSTSToken && !options.refreshSTSTokenInterval) { - console.warn( - "It's recommended to set 'refreshSTSToken' and 'refreshSTSTokenInterval' to refresh" + - ' stsToken、accessKeyId、accessKeySecret automatically when sts token has expired' - ); - } - if (options.bucket) { - _checkBucketName(options.bucket); - } - const opts = Object.assign( - { - region: 'oss-cn-hangzhou', - internal: false, - secure: false, - timeout: 60000, - bucket: null, - endpoint: null, - cname: false, - isRequestPay: false, - sldEnable: false, - headerEncoding: 'utf-8', - refreshSTSToken: null, - refreshSTSTokenInterval: 60000 * 5, - retryMax: 0, - }, - 
options - ); - - opts.accessKeyId = opts.accessKeyId.trim(); - opts.accessKeySecret = opts.accessKeySecret.trim(); - - if (opts.timeout) { - opts.timeout = ms(opts.timeout); - } - - if (opts.endpoint) { - opts.endpoint = setEndpoint(opts.endpoint, opts.secure); - } else if (opts.region) { - opts.endpoint = setRegion(opts.region, opts.internal, opts.secure); - } else { - throw new Error('require options.endpoint or options.region'); - } - - opts.inited = true; - return opts; -}; diff --git a/lib/common/image/index.js b/lib/common/image/index.js deleted file mode 100644 index 1e319c289..000000000 --- a/lib/common/image/index.js +++ /dev/null @@ -1,5 +0,0 @@ -const merge = require('merge-descriptors'); - -const proto = exports; - -merge(proto, require('./processObjectSave')); diff --git a/lib/common/image/processObjectSave.js b/lib/common/image/processObjectSave.js deleted file mode 100644 index aa8f9ac61..000000000 --- a/lib/common/image/processObjectSave.js +++ /dev/null @@ -1,42 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const querystring = require('querystring'); -const { base64encode } = require('utility'); - -const proto = exports; - -proto.processObjectSave = async function processObjectSave(sourceObject, targetObject, process, targetBucket) { - checkArgs(sourceObject, 'sourceObject'); - checkArgs(targetObject, 'targetObject'); - checkArgs(process, 'process'); - targetObject = this._objectName(targetObject); - if (targetBucket) { - _checkBucketName(targetBucket); - } - - const params = this._objectRequestParams('POST', sourceObject, { - subres: 'x-oss-process', - }); - - const bucketParam = targetBucket ? 
`,b_${base64encode(targetBucket)}` : ''; - targetObject = base64encode(targetObject); - - const content = { - 'x-oss-process': `${process}|sys/saveas,o_${targetObject}${bucketParam}`, - }; - params.content = querystring.stringify(content); - - const result = await this.request(params); - return { - res: result.res, - status: result.res.status, - }; -}; - -function checkArgs(name, key) { - if (!name) { - throw new Error(`${key} is required`); - } - if (typeof name !== 'string') { - throw new Error(`${key} must be String`); - } -} diff --git a/lib/common/multipart-copy.js b/lib/common/multipart-copy.js deleted file mode 100644 index c630b2d6d..000000000 --- a/lib/common/multipart-copy.js +++ /dev/null @@ -1,233 +0,0 @@ -/* eslint-disable no-async-promise-executor */ - -const debug = require('util').debuglog('oss-client:multipart-copy'); -const copy = require('copy-to'); - -const proto = exports; - - -/** - * Upload a part copy in a multipart from the source bucket/object - * used with initMultipartUpload and completeMultipartUpload. 
- * @param {String} name copy object name - * @param {String} uploadId the upload id - * @param {Number} partNo the part number - * @param {String} range like 0-102400 part size need to copy - * @param {Object} sourceData - * {String} sourceData.sourceKey the source object name - * {String} sourceData.sourceBucketName the source bucket name - * @param {Object} options - */ -/* eslint max-len: [0] */ -proto.uploadPartCopy = async function uploadPartCopy(name, uploadId, partNo, range, sourceData, options = {}) { - options.headers = options.headers || {}; - const versionId = options.versionId || (options.subres && options.subres.versionId) || null; - let copySource; - if (versionId) { - copySource = `/${sourceData.sourceBucketName}/${encodeURIComponent(sourceData.sourceKey)}?versionId=${versionId}`; - } else { - copySource = `/${sourceData.sourceBucketName}/${encodeURIComponent(sourceData.sourceKey)}`; - } - - options.headers['x-oss-copy-source'] = copySource; - if (range) { - options.headers['x-oss-copy-source-range'] = `bytes=${range}`; - } - - options.subres = { - partNumber: partNo, - uploadId, - }; - const params = this._objectRequestParams('PUT', name, options); - params.mime = options.mime; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - name, - etag: result.res.headers.etag, - res: result.res, - }; -}; - -/** - * @param {String} name copy object name - * @param {Object} sourceData - * {String} sourceData.sourceKey the source object name - * {String} sourceData.sourceBucketName the source bucket name - * {Number} sourceData.startOffset data copy start byte offset, e.g: 0 - * {Number} sourceData.endOffset data copy end byte offset, e.g: 102400 - * @param {Object} options - * {Number} options.partSize - */ -proto.multipartUploadCopy = async function multipartUploadCopy(name, sourceData, options = {}) { - this.resetCancelFlag(); - const { versionId = null } = options; - const metaOpt = { - versionId, - }; - const 
objectMeta = await this._getObjectMeta(sourceData.sourceBucketName, sourceData.sourceKey, metaOpt); - const fileSize = objectMeta.res.headers['content-length']; - sourceData.startOffset = sourceData.startOffset || 0; - sourceData.endOffset = sourceData.endOffset || fileSize; - - if (options.checkpoint && options.checkpoint.uploadId) { - return await this._resumeMultipartCopy(options.checkpoint, sourceData, options); - } - - const minPartSize = 100 * 1024; - - const copySize = sourceData.endOffset - sourceData.startOffset; - if (copySize < minPartSize) { - throw new Error(`copySize must not be smaller than ${minPartSize}`); - } - - if (options.partSize && options.partSize < minPartSize) { - throw new Error(`partSize must not be smaller than ${minPartSize}`); - } - - const init = await this.initMultipartUpload(name, options); - const { uploadId } = init; - const partSize = this._getPartSize(copySize, options.partSize); - - const checkpoint = { - name, - copySize, - partSize, - uploadId, - doneParts: [], - }; - - if (options && options.progress) { - await options.progress(0, checkpoint, init.res); - } - - return await this._resumeMultipartCopy(checkpoint, sourceData, options); -}; - -/* - * Resume multipart copy from checkpoint. The checkpoint will be - * updated after each successful part copy. 
- * @param {Object} checkpoint the checkpoint - * @param {Object} options - */ -proto._resumeMultipartCopy = async function _resumeMultipartCopy(checkpoint, sourceData, options) { - if (this.isCancel()) { - throw this._makeCancelEvent(); - } - const { versionId = null } = options; - const metaOpt = { - versionId, - }; - const { - copySize, partSize, uploadId, doneParts, name, - } = checkpoint; - - const partOffs = this._divideMultipartCopyParts(copySize, partSize, sourceData.startOffset); - const numParts = partOffs.length; - - const uploadPartCopyOptions = { - headers: {}, - }; - - if (options.copyheaders) { - copy(options.copyheaders).to(uploadPartCopyOptions.headers); - } - if (versionId) { - copy(metaOpt).to(uploadPartCopyOptions); - } - - const uploadPartJob = function uploadPartJob(self, partNo, source) { - return new Promise(async (resolve, reject) => { - try { - if (!self.isCancel()) { - const pi = partOffs[partNo - 1]; - const range = `${pi.start}-${pi.end - 1}`; - - let result; - try { - result = await self.uploadPartCopy(name, uploadId, partNo, range, source, uploadPartCopyOptions); - } catch (error) { - if (error.status === 404) { - throw self._makeAbortEvent(); - } - throw error; - } - if (!self.isCancel()) { - debug(`content-range ${result.res.headers['content-range']}`); - doneParts.push({ - number: partNo, - etag: result.res.headers.etag, - }); - checkpoint.doneParts = doneParts; - - if (options && options.progress) { - await options.progress(doneParts.length / numParts, checkpoint, result.res); - } - } - } - resolve(); - } catch (err) { - err.partNum = partNo; - reject(err); - } - }); - }; - - const all = Array.from(new Array(numParts), (x, i) => i + 1); - const done = doneParts.map(p => p.number); - const todo = all.filter(p => done.indexOf(p) < 0); - const defaultParallel = 5; - const parallel = options.parallel || defaultParallel; - - // upload in parallel - const errors = await this._parallelNode(todo, parallel, uploadPartJob, sourceData); - - 
const abortEvent = errors.find(err => err.name === 'abort'); - if (abortEvent) throw abortEvent; - - if (this.isCancel()) { - throw this._makeCancelEvent(); - } - - // check errors after all jobs are completed - if (errors && errors.length > 0) { - const err = errors[0]; - err.message = `Failed to copy some parts with error: ${err.toString()} part_num: ${err.partNum}`; - throw err; - } - - return await this.completeMultipartUpload(name, uploadId, doneParts, options); -}; - -proto._divideMultipartCopyParts = function _divideMultipartCopyParts(fileSize, partSize, startOffset) { - const numParts = Math.ceil(fileSize / partSize); - - const partOffs = []; - for (let i = 0; i < numParts; i++) { - const start = (partSize * i) + startOffset; - const end = Math.min(start + partSize, fileSize + startOffset); - - partOffs.push({ - start, - end, - }); - } - - return partOffs; -}; - -/** - * Get Object Meta - * @param {String} bucket bucket name - * @param {String} name object name - * @param {Object} options options - */ -proto._getObjectMeta = async function _getObjectMeta(bucket, name, options) { - const currentBucket = this.getBucket(); - this.setBucket(bucket); - const data = await this.head(name, options); - this.setBucket(currentBucket); - return data; -}; diff --git a/lib/common/multipart.js b/lib/common/multipart.js deleted file mode 100644 index 8ceb8f443..000000000 --- a/lib/common/multipart.js +++ /dev/null @@ -1,270 +0,0 @@ -const copy = require('copy-to'); -const callback = require('./callback'); -const { deepCopyWith } = require('./utils/deepCopy'); -const { omit } = require('./utils/omit'); - -const proto = exports; - -/** - * List the on-going multipart uploads - * https://help.aliyun.com/document_detail/31997.html - * @param {Object} query query - * @param {Object} options options - * @return {Array} the multipart uploads - */ -proto.listUploads = async function listUploads(query, options) { - options = options || {}; - const opt = {}; - copy(options).to(opt); 
- opt.subres = 'uploads'; - const params = this._objectRequestParams('GET', '', opt); - params.query = query; - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - let uploads = result.data.Upload || []; - if (!Array.isArray(uploads)) { - uploads = [ uploads ]; - } - uploads = uploads.map(up => ({ - name: up.Key, - uploadId: up.UploadId, - initiated: up.Initiated, - })); - - return { - res: result.res, - uploads, - bucket: result.data.Bucket, - nextKeyMarker: result.data.NextKeyMarker, - nextUploadIdMarker: result.data.NextUploadIdMarker, - isTruncated: result.data.IsTruncated === 'true', - }; -}; - -/** - * List the done uploadPart parts - * @param {String} name object name - * @param {String} uploadId multipart upload id - * @param {Object} query query - * {Number} query.max-parts The maximum part number in the response of the OSS. Default value: 1000 - * {Number} query.part-number-marker Starting position of a specific list. - * {String} query.encoding-type Specify the encoding of the returned content and the encoding type. 
- * @param {Object} options options - * @return {Object} result - */ -proto.listParts = async function listParts(name, uploadId, query, options) { - options = options || {}; - const opt = {}; - copy(options).to(opt); - opt.subres = { - uploadId, - }; - const params = this._objectRequestParams('GET', name, opt); - params.query = query; - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - res: result.res, - uploadId: result.data.UploadId, - bucket: result.data.Bucket, - name: result.data.Key, - partNumberMarker: result.data.PartNumberMarker, - nextPartNumberMarker: result.data.NextPartNumberMarker, - maxParts: result.data.MaxParts, - isTruncated: result.data.IsTruncated, - parts: result.data.Part || [], - }; -}; - -/** - * Abort a multipart upload transaction - * @param {String} name the object name - * @param {String} uploadId the upload id - * @param {Object} options options - */ -proto.abortMultipartUpload = async function abortMultipartUpload(name, uploadId, options) { - this._stop(); - options = options || {}; - const opt = {}; - copy(options).to(opt); - opt.subres = { uploadId }; - const params = this._objectRequestParams('DELETE', name, opt); - params.successStatuses = [ 204 ]; - - const result = await this.request(params); - return { - res: result.res, - }; -}; - -/** - * Initiate a multipart upload transaction - * @param {String} name the object name - * @param {Object} options options - * @return {String} upload id - */ -proto.initMultipartUpload = async function initMultipartUpload(name, options) { - options = options || {}; - const opt = {}; - copy(options).to(opt); - opt.headers = opt.headers || {}; - this._convertMetaToHeaders(options.meta, opt.headers); - - opt.subres = 'uploads'; - const params = this._objectRequestParams('POST', name, opt); - params.mime = options.mime; - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await 
this.request(params); - - return { - res: result.res, - bucket: result.data.Bucket, - name: result.data.Key, - uploadId: result.data.UploadId, - }; -}; - -/** - * Upload a part in a multipart upload transaction - * @param {String} name the object name - * @param {String} uploadId the upload id - * @param {Integer} partNo the part number - * @param {File} file upload File, whole File - * @param {Integer} start part start bytes e.g: 102400 - * @param {Integer} end part end bytes e.g: 204800 - * @param {Object} options options - */ -proto.uploadPart = async function uploadPart(name, uploadId, partNo, file, start, end, options) { - const data = { - size: end - start, - }; - const isBrowserEnv = process && process.browser; - isBrowserEnv - ? (data.content = await this._createBuffer(file, start, end)) - : (data.stream = await this._createStream(file, start, end)); - return await this._uploadPart(name, uploadId, partNo, data, options); -}; - -/** - * Complete a multipart upload transaction - * @param {String} name the object name - * @param {String} uploadId the upload id - * @param {Array} parts the uploaded parts, each in the structure: - * {Integer} number partNo - * {String} etag part etag uploadPartCopy result.res.header.etag - * @param {Object} options - * {Object} options.callback The callback parameter is composed of a JSON string encoded in Base64 - * {String} options.callback.url the OSS sends a callback request to this URL - * {String} options.callback.host The host header value for initiating callback requests - * {String} options.callback.body The value of the request body when a callback is initiated - * {String} options.callback.contentType The Content-Type of the callback requests initiatiated - * {Object} options.callback.customValue Custom parameters are a map of key-values, e.g: - * customValue = { - * key1: 'value1', - * key2: 'value2' - * } - */ -proto.completeMultipartUpload = async function completeMultipartUpload(name, uploadId, parts, options) { - 
const completeParts = parts - .concat() - .sort((a, b) => a.number - b.number) - .filter((item, index, arr) => !index || item.number !== arr[index - 1].number); - let xml = '\n\n'; - for (let i = 0; i < completeParts.length; i++) { - const p = completeParts[i]; - xml += '\n'; - xml += `${p.number}\n`; - xml += `${p.etag}\n`; - xml += '\n'; - } - xml += ''; - - options = options || {}; - let opt = {}; - opt = deepCopyWith(options, _ => { - if (Buffer.isBuffer(_)) return null; - }); - opt.subres = { uploadId }; - opt.headers = omit(opt.headers, [ 'x-oss-server-side-encryption', 'x-oss-storage-class' ]); - - const params = this._objectRequestParams('POST', name, opt); - callback.encodeCallback(params, opt); - params.mime = 'xml'; - params.content = xml; - - if (!(params.headers && params.headers['x-oss-callback'])) { - params.xmlResponse = true; - } - params.successStatuses = [ 200 ]; - const result = await this.request(params); - - if (options.progress) { - await options.progress(1, null, result.res); - } - - const ret = { - res: result.res, - bucket: params.bucket, - name, - etag: result.res.headers.etag, - data: result.data, - }; - - if (params.headers && params.headers['x-oss-callback']) { - ret.data = JSON.parse(result.data.toString()); - } - - return ret; -}; - -/** - * Upload a part in a multipart upload transaction - * @param {String} name the object name - * @param {String} uploadId the upload id - * @param {Integer} partNo the part number - * @param {Object} data the body data - * @param {Object} options options - */ -proto._uploadPart = async function _uploadPart(name, uploadId, partNo, data, options) { - options = options || {}; - const opt = {}; - copy(options).to(opt); - opt.headers = { - 'Content-Length': data.size, - }; - - opt.subres = { - partNumber: partNo, - uploadId, - }; - const params = this._objectRequestParams('PUT', name, opt); - params.mime = opt.mime; - const isBrowserEnv = process && process.browser; - isBrowserEnv ? 
(params.content = data.content) : (params.stream = data.stream); - params.successStatuses = [ 200 ]; - params.disabledMD5 = options.disabledMD5; - - const result = await this.request(params); - - if (!result.res.headers.etag) { - throw new Error( - 'Please set the etag of expose-headers in OSS \n https://help.aliyun.com/document_detail/32069.html' - ); - } - if (data.stream) { - data.stream = null; - params.stream = null; - } - return { - name, - etag: result.res.headers.etag, - res: result.res, - }; -}; diff --git a/lib/common/object/asyncSignatureUrl.js b/lib/common/object/asyncSignatureUrl.js deleted file mode 100644 index 2027c6130..000000000 --- a/lib/common/object/asyncSignatureUrl.js +++ /dev/null @@ -1,45 +0,0 @@ -const urlutil = require('url'); -const utility = require('utility'); -const copy = require('copy-to'); -const signHelper = require('../../common/signUtils'); -const { isIP } = require('../utils/isIP'); -const { setSTSToken } = require('../utils/setSTSToken'); -const { isFunction } = require('../utils/isFunction'); -const proto = exports; - -proto.asyncSignatureUrl = async function asyncSignatureUrl(name, options) { - if (isIP(this.options.endpoint.hostname)) { - throw new Error('can not get the object URL when endpoint is IP'); - } - options = options || {}; - name = this._objectName(name); - options.method = options.method || 'GET'; - const expires = utility.timestamp() + (options.expires || 1800); - const params = { - bucket: this.options.bucket, - object: name, - }; - - const resource = this._getResource(params); - - if (this.options.stsToken && isFunction(this.options.refreshSTSToken)) { - await setSTSToken.call(this); - } - - if (this.options.stsToken) { - options['security-token'] = this.options.stsToken; - } - - const signRes = signHelper._signatureForURL(this.options.accessKeySecret, options, resource, expires); - - const url = urlutil.parse(this._getReqUrl(params)); - url.query = { - OSSAccessKeyId: this.options.accessKeyId, - Expires: 
expires, - Signature: signRes.Signature, - }; - - copy(signRes.subResource).to(url.query); - - return url.format(); -}; diff --git a/lib/common/object/calculatePostSignature.js b/lib/common/object/calculatePostSignature.js deleted file mode 100644 index 31c28130b..000000000 --- a/lib/common/object/calculatePostSignature.js +++ /dev/null @@ -1,36 +0,0 @@ - -const { policy2Str } = require('../utils/policy2Str'); -const signHelper = require('../signUtils'); -const { isObject } = require('../utils/isObject'); - -const proto = exports; - -/** - * @param {Object or JSON} policy specifies the validity of the fields in the request. - * @return {Object} params - * {String} params.OSSAccessKeyId - * {String} params.Signature - * {String} params.policy JSON text encoded with UTF-8 and Base64. - */ -proto.calculatePostSignature = function calculatePostSignature(policy) { - if (!isObject(policy) && typeof policy !== 'string') { - throw new Error('policy must be JSON string or Object'); - } - if (!isObject(policy)) { - try { - JSON.stringify(JSON.parse(policy)); - } catch (error) { - throw new Error('policy must be JSON string or Object'); - } - } - policy = Buffer.from(policy2Str(policy), 'utf8').toString('base64'); - - const Signature = signHelper.computeSignature(this.options.accessKeySecret, policy); - - const query = { - OSSAccessKeyId: this.options.accessKeyId, - Signature, - policy, - }; - return query; -}; diff --git a/lib/common/object/copyObject.js b/lib/common/object/copyObject.js deleted file mode 100644 index 31fc3a725..000000000 --- a/lib/common/object/copyObject.js +++ /dev/null @@ -1,74 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; - -const REPLACE_HEDERS = [ - 'content-type', - 'content-encoding', - 'content-language', - 'content-disposition', - 'cache-control', - 'expires', -]; - -proto.copy = async function copy(name, sourceName, bucketName, options) { - if (typeof bucketName === 'object') 
{ - options = bucketName; // 兼容旧版本,旧版本第三个参数为options - } - options = options || {}; - options.headers = options.headers || {}; - - Object.keys(options.headers).forEach(key => { - options.headers[`x-oss-copy-source-${key.toLowerCase()}`] = options.headers[key]; - }); - if (options.meta || Object.keys(options.headers).find(_ => REPLACE_HEDERS.includes(_.toLowerCase()))) { - options.headers['x-oss-metadata-directive'] = 'REPLACE'; - } - this._convertMetaToHeaders(options.meta, options.headers); - - sourceName = this._getSourceName(sourceName, bucketName); - - if (options.versionId) { - sourceName = `${sourceName}?versionId=${options.versionId}`; - } - - options.headers['x-oss-copy-source'] = sourceName; - - const params = this._objectRequestParams('PUT', name, options); - params.xmlResponse = true; - params.successStatuses = [ 200, 304 ]; - - const result = await this.request(params); - - let { data } = result; - if (data) { - data = { - etag: data.ETag, - lastModified: data.LastModified, - }; - } - - return { - data, - res: result.res, - }; -}; - -// todo delete -proto._getSourceName = function _getSourceName(sourceName, bucketName) { - if (typeof bucketName === 'string') { - sourceName = this._objectName(sourceName); - } else if (sourceName[0] !== '/') { - bucketName = this.options.bucket; - } else { - bucketName = sourceName.replace(/\/(.+?)(\/.*)/, '$1'); - sourceName = sourceName.replace(/(\/.+?\/)(.*)/, '$2'); - } - - _checkBucketName(bucketName); - - sourceName = encodeURIComponent(sourceName); - - sourceName = `/${bucketName}/${sourceName}`; - return sourceName; -}; diff --git a/lib/common/object/delete.js b/lib/common/object/delete.js deleted file mode 100644 index 896498ec7..000000000 --- a/lib/common/object/delete.js +++ /dev/null @@ -1,22 +0,0 @@ -const proto = exports; -/** - * delete - * @param {String} name - object name - * @param {Object} options - * @param {{res}} - */ - -proto.delete = async function _delete(name, options = {}) { - options.subres = 
Object.assign({}, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - const params = this._objectRequestParams('DELETE', name, options); - params.successStatuses = [ 204 ]; - - const result = await this.request(params); - - return { - res: result.res, - }; -}; diff --git a/lib/common/object/deleteMulti.js b/lib/common/object/deleteMulti.js deleted file mode 100644 index 286a7b164..000000000 --- a/lib/common/object/deleteMulti.js +++ /dev/null @@ -1,57 +0,0 @@ -/* eslint-disable object-curly-newline */ -const utility = require('utility'); -const { obj2xml } = require('../utils/obj2xml'); - -const proto = exports; - -proto.deleteMulti = async function deleteMulti(names, options = {}) { - const objects = []; - if (!names || !names.length) { - throw new Error('names is required'); - } - for (let i = 0; i < names.length; i++) { - const object = {}; - if (typeof names[i] === 'string') { - object.Key = utility.escape(this._objectName(names[i])); - } else { - const { key, versionId } = names[i]; - object.Key = utility.escape(this._objectName(key)); - object.VersionId = versionId; - } - objects.push(object); - } - - const paramXMLObj = { - Delete: { - Quiet: !!options.quiet, - Object: objects, - }, - }; - - const paramXML = obj2xml(paramXMLObj, { - headers: true, - }); - - options.subres = Object.assign({ delete: '' }, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - const params = this._objectRequestParams('POST', '', options); - params.mime = 'xml'; - params.content = paramXML; - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - - const r = result.data; - let deleted = (r && r.Deleted) || null; - if (deleted) { - if (!Array.isArray(deleted)) { - deleted = [ deleted ]; - } - } - return { - res: result.res, - deleted: deleted || [], - }; -}; diff --git a/lib/common/object/deleteObjectTagging.js 
b/lib/common/object/deleteObjectTagging.js deleted file mode 100644 index 3b3565407..000000000 --- a/lib/common/object/deleteObjectTagging.js +++ /dev/null @@ -1,25 +0,0 @@ -const proto = exports; -/** - * deleteObjectTagging - * @param {String} name - object name - * @param {Object} options - */ - -proto.deleteObjectTagging = async function deleteObjectTagging( - name, - options = {} -) { - options.subres = Object.assign({ tagging: '' }, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - name = this._objectName(name); - const params = this._objectRequestParams('DELETE', name, options); - params.successStatuses = [ 204 ]; - const result = await this.request(params); - - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/object/generateObjectUrl.js b/lib/common/object/generateObjectUrl.js deleted file mode 100644 index 0a0275be7..000000000 --- a/lib/common/object/generateObjectUrl.js +++ /dev/null @@ -1,28 +0,0 @@ -const urlutil = require('url'); -const { isIP } = require('../utils/isIP'); - -const proto = exports; - -/** - * Get Object url by name - * @param {String} name - object name - * @param {String} [baseUrl] - If provide `baseUrl`, will use `baseUrl` instead the default `endpoint and bucket`. 
- * @return {String} object url include bucket - */ -proto.generateObjectUrl = function generateObjectUrl(name, baseUrl) { - if (isIP(this.options.endpoint.hostname)) { - throw new Error('can not get the object URL when endpoint is IP'); - } - if (!baseUrl) { - baseUrl = this.options.endpoint.format(); - const copyUrl = urlutil.parse(baseUrl); - const { bucket } = this.options; - - copyUrl.hostname = `${bucket}.${copyUrl.hostname}`; - copyUrl.host = `${bucket}.${copyUrl.host}`; - baseUrl = copyUrl.format(); - } else if (baseUrl[baseUrl.length - 1] !== '/') { - baseUrl += '/'; - } - return baseUrl + this._escape(this._objectName(name)); -}; diff --git a/lib/common/object/get.js b/lib/common/object/get.js deleted file mode 100644 index 3e4eca2de..000000000 --- a/lib/common/object/get.js +++ /dev/null @@ -1,64 +0,0 @@ -const { createWriteStream } = require('fs'); -const { rm } = require('fs/promises'); -const is = require('is-type-of'); - -const proto = exports; -/** - * get - * @param {String} name - object name - * @param {String | Stream} file - file - * @param {Object} options - options - */ -proto.get = async function get(name, file, options = {}) { - let writeStream = null; - let needDestroy = false; - - if (is.writableStream(file)) { - writeStream = file; - } else if (is.string(file)) { - writeStream = createWriteStream(file); - needDestroy = true; - } else { - // get(name, options) - options = file; - } - - options = options || {}; - const isBrowserEnv = process && process.browser; - const responseCacheControl = options.responseCacheControl === null ? '' : 'no-cache'; - const defaultSubresOptions = - isBrowserEnv && responseCacheControl ? 
{ 'response-cache-control': responseCacheControl } : {}; - options.subres = Object.assign(defaultSubresOptions, options.subres); - - if (options.versionId) { - options.subres.versionId = options.versionId; - } - if (options.process) { - options.subres['x-oss-process'] = options.process; - } - - let result; - try { - const params = this._objectRequestParams('GET', name, options); - params.writeStream = writeStream; - params.successStatuses = [ 200, 206, 304 ]; - - result = await this.request(params); - - if (needDestroy) { - writeStream.destroy(); - } - } catch (err) { - if (needDestroy) { - writeStream.destroy(); - // should delete the exists file before throw error - await rm(file, { force: true }); - } - throw err; - } - - return { - res: result.res, - content: result.data, - }; -}; diff --git a/lib/common/object/getACL.js b/lib/common/object/getACL.js deleted file mode 100644 index b77875e8e..000000000 --- a/lib/common/object/getACL.js +++ /dev/null @@ -1,30 +0,0 @@ -const proto = exports; - -/* - * Get object's ACL - * @param {String} name the object key - * @param {Object} options - * @return {Object} - */ -proto.getACL = async function getACL(name, options = {}) { - options.subres = Object.assign({ acl: '' }, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - name = this._objectName(name); - - const params = this._objectRequestParams('GET', name, options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - - const result = await this.request(params); - - return { - acl: result.data.AccessControlList.Grant, - owner: { - id: result.data.Owner.ID, - displayName: result.data.Owner.DisplayName, - }, - res: result.res, - }; -}; diff --git a/lib/common/object/getAsyncFetch.js b/lib/common/object/getAsyncFetch.js deleted file mode 100644 index 6c361abfd..000000000 --- a/lib/common/object/getAsyncFetch.js +++ /dev/null @@ -1,25 +0,0 @@ -const { formatObjKey } = require('../utils/formatObjKey'); - -/** - * 
getAsyncFetch - * @param {String} taskId taskId - * @param {Object} options options - */ -async function getAsyncFetch(taskId, options = {}) { - options.subres = Object.assign({ asyncFetch: '' }, options.subres); - options.headers = options.headers || {}; - const params = this._objectRequestParams('GET', '', options); - params.headers['x-oss-task-id'] = taskId; - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - const taskInfo = formatObjKey(result.data.TaskInfo, 'firstLowerCase'); - return { - res: result.res, - status: result.status, - state: result.data.State, - taskInfo, - }; -} - -exports.getAsyncFetch = getAsyncFetch; diff --git a/lib/common/object/getBucketVersions.js b/lib/common/object/getBucketVersions.js deleted file mode 100644 index 98a086113..000000000 --- a/lib/common/object/getBucketVersions.js +++ /dev/null @@ -1,98 +0,0 @@ -const proto = exports; -const { isObject } = require('../utils/isObject'); - -proto.getBucketVersions = getBucketVersions; -proto.listObjectVersions = getBucketVersions; - -async function getBucketVersions(query = {}, options = {}) { - // prefix, key-marker, max-keys, delimiter, encoding-type, version-id-marker - if (query.versionIdMarker && query.keyMarker === undefined) { - throw new Error('A version-id marker cannot be specified without a key marker'); - } - - options.subres = Object.assign({ versions: '' }, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - const params = this._objectRequestParams('GET', '', options); - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - params.query = formatQuery(query); - - const result = await this.request(params); - let objects = result.data.Version || []; - let deleteMarker = result.data.DeleteMarker || []; - const that = this; - if (objects) { - if (!Array.isArray(objects)) { - objects = [ objects ]; - } - objects = objects.map(obj => ({ - name: obj.Key, - url: 
that._objectUrl(obj.Key), - lastModified: obj.LastModified, - isLatest: obj.IsLatest === 'true', - versionId: obj.VersionId, - etag: obj.ETag, - type: obj.Type, - size: Number(obj.Size), - storageClass: obj.StorageClass, - owner: { - id: obj.Owner.ID, - displayName: obj.Owner.DisplayName, - }, - })); - } - if (deleteMarker) { - if (!Array.isArray(deleteMarker)) { - deleteMarker = [ deleteMarker ]; - } - deleteMarker = deleteMarker.map(obj => ({ - name: obj.Key, - lastModified: obj.LastModified, - versionId: obj.VersionId, - owner: { - id: obj.Owner.ID, - displayName: obj.Owner.DisplayName, - }, - })); - } - let prefixes = result.data.CommonPrefixes || null; - if (prefixes) { - if (!Array.isArray(prefixes)) { - prefixes = [ prefixes ]; - } - prefixes = prefixes.map(item => item.Prefix); - } - return { - res: result.res, - objects, - deleteMarker, - prefixes, - // attirbute of legacy error - nextMarker: result.data.NextKeyMarker || null, - // attirbute of legacy error - NextVersionIdMarker: result.data.NextVersionIdMarker || null, - nextKeyMarker: result.data.NextKeyMarker || null, - nextVersionIdMarker: result.data.NextVersionIdMarker || null, - isTruncated: result.data.IsTruncated === 'true', - }; -} - - -function camel2Line(name) { - return name.replace(/([A-Z])/g, '-$1').toLowerCase(); -} - -function formatQuery(query = {}) { - const obj = {}; - if (isObject(query)) { - Object.keys(query).forEach(key => { - obj[camel2Line(key)] = query[key]; - }); - } - - return obj; -} - diff --git a/lib/common/object/getObjectMeta.js b/lib/common/object/getObjectMeta.js deleted file mode 100644 index f941b06e5..000000000 --- a/lib/common/object/getObjectMeta.js +++ /dev/null @@ -1,23 +0,0 @@ -const proto = exports; -/** - * getObjectMeta - * @param {String} name - object name - * @param {Object} options - * @param {{res}} - */ - -proto.getObjectMeta = async function getObjectMeta(name, options) { - options = options || {}; - name = this._objectName(name); - options.subres = 
Object.assign({ objectMeta: '' }, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - const params = this._objectRequestParams('HEAD', name, options); - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/object/getObjectTagging.js b/lib/common/object/getObjectTagging.js deleted file mode 100644 index c4b9fcb60..000000000 --- a/lib/common/object/getObjectTagging.js +++ /dev/null @@ -1,34 +0,0 @@ -const proto = exports; -const { isObject } = require('../utils/isObject'); -/** - * getObjectTagging - * @param {String} name - object name - * @param {Object} options - * @return {Object} - */ - -proto.getObjectTagging = async function getObjectTagging(name, options = {}) { - options.subres = Object.assign({ tagging: '' }, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - name = this._objectName(name); - const params = this._objectRequestParams('GET', name, options); - params.successStatuses = [ 200 ]; - const result = await this.request(params); - const Tagging = await this.parseXML(result.data); - let { Tag } = Tagging.TagSet; - Tag = Tag && isObject(Tag) ? [ Tag ] : Tag || []; - - const tag = {}; - - Tag.forEach(item => { - tag[item.Key] = item.Value; - }); - - return { - status: result.status, - res: result.res, - tag, - }; -}; diff --git a/lib/common/object/getObjectUrl.js b/lib/common/object/getObjectUrl.js deleted file mode 100644 index 61bcef095..000000000 --- a/lib/common/object/getObjectUrl.js +++ /dev/null @@ -1,21 +0,0 @@ -const { isIP } = require('../utils/isIP'); - -const proto = exports; -/** - * Get Object url by name - * @param {String} name - object name - * @param {String} [baseUrl] - If provide `baseUrl`, - * will use `baseUrl` instead the default `endpoint`. 
- * @return {String} object url - */ -proto.getObjectUrl = function getObjectUrl(name, baseUrl) { - if (isIP(this.options.endpoint.hostname)) { - throw new Error('can not get the object URL when endpoint is IP'); - } - if (!baseUrl) { - baseUrl = this.options.endpoint.format(); - } else if (baseUrl[baseUrl.length - 1] !== '/') { - baseUrl += '/'; - } - return baseUrl + this._escape(this._objectName(name)); -}; diff --git a/lib/common/object/getSymlink.js b/lib/common/object/getSymlink.js deleted file mode 100644 index 7009ca75e..000000000 --- a/lib/common/object/getSymlink.js +++ /dev/null @@ -1,23 +0,0 @@ -const proto = exports; -/** - * getSymlink - * @param {String} name - object name - * @param {Object} options - * @param {{res}} - */ - -proto.getSymlink = async function getSymlink(name, options = {}) { - options.subres = Object.assign({ symlink: '' }, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - name = this._objectName(name); - const params = this._objectRequestParams('GET', name, options); - params.successStatuses = [ 200 ]; - const result = await this.request(params); - const target = result.res.headers['x-oss-symlink-target']; - return { - targetName: decodeURIComponent(target), - res: result.res, - }; -}; diff --git a/lib/common/object/head.js b/lib/common/object/head.js deleted file mode 100644 index f797b650e..000000000 --- a/lib/common/object/head.js +++ /dev/null @@ -1,36 +0,0 @@ -const proto = exports; -/** - * head - * @param {String} name - object name - * @param {Object} options - * @param {{res}} - */ - -proto.head = async function head(name, options = {}) { - options.subres = Object.assign({}, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - const params = this._objectRequestParams('HEAD', name, options); - params.successStatuses = [ 200, 304 ]; - - const result = await this.request(params); - - const data = { - meta: null, - res: result.res, - 
status: result.status, - }; - - if (result.status === 200) { - Object.keys(result.headers).forEach(k => { - if (k.indexOf('x-oss-meta-') === 0) { - if (!data.meta) { - data.meta = {}; - } - data.meta[k.substring(11)] = result.headers[k]; - } - }); - } - return data; -}; diff --git a/lib/common/object/index.js b/lib/common/object/index.js deleted file mode 100644 index 755d3774f..000000000 --- a/lib/common/object/index.js +++ /dev/null @@ -1,25 +0,0 @@ -const merge = require('merge-descriptors'); - -const proto = exports; - -merge(proto, require('./getSymlink')); -merge(proto, require('./putSymlink')); -merge(proto, require('./getObjectMeta')); -merge(proto, require('./copyObject')); -merge(proto, require('./calculatePostSignature')); -merge(proto, require('./getObjectTagging')); -merge(proto, require('./putObjectTagging')); -merge(proto, require('./deleteObjectTagging')); -merge(proto, require('./getBucketVersions')); -merge(proto, require('./deleteMulti')); -merge(proto, require('./getACL')); -merge(proto, require('./putACL')); -merge(proto, require('./head')); -merge(proto, require('./delete')); -merge(proto, require('./get')); -merge(proto, require('./postAsyncFetch')); -merge(proto, require('./getAsyncFetch')); -merge(proto, require('./generateObjectUrl')); -merge(proto, require('./getObjectUrl')); -merge(proto, require('./signatureUrl')); -merge(proto, require('./asyncSignatureUrl')); diff --git a/lib/common/object/postAsyncFetch.js b/lib/common/object/postAsyncFetch.js deleted file mode 100644 index 6398d05f5..000000000 --- a/lib/common/object/postAsyncFetch.js +++ /dev/null @@ -1,43 +0,0 @@ -const { obj2xml } = require('../utils/obj2xml'); - -/** - * postAsyncFetch - * @param {String} object name of the object key - * @param {String} url url - * @param {Object} options options - * {String} options.host - * {String} options.contentMD5 - * {String} options.callback - * {String} options.storageClass Standard/IA/Archive - * {Boolean} options.ignoreSameKey 
default value true - */ -async function postAsyncFetch(object, url, options = {}) { - options.subres = Object.assign({ asyncFetch: '' }, options.subres); - options.headers = options.headers || {}; - object = this._objectName(object); - const { host = '', contentMD5 = '', callback = '', storageClass = '', ignoreSameKey = true } = options; - const paramXMLObj = { - AsyncFetchTaskConfiguration: { - Url: url, - Object: object, - Host: host, - ContentMD5: contentMD5, - Callback: callback, - StorageClass: storageClass, - IgnoreSameKey: ignoreSameKey, - }, - }; - const params = this._objectRequestParams('POST', '', options); - params.mime = 'xml'; - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - params.content = obj2xml(paramXMLObj); - const result = await this.request(params); - return { - res: result.res, - status: result.status, - taskId: result.data.TaskId, - }; -} - -exports.postAsyncFetch = postAsyncFetch; diff --git a/lib/common/object/putACL.js b/lib/common/object/putACL.js deleted file mode 100644 index ea25bab5c..000000000 --- a/lib/common/object/putACL.js +++ /dev/null @@ -1,27 +0,0 @@ -const proto = exports; - -/* - * Set object's ACL - * @param {String} name the object key - * @param {String} acl the object ACL - * @param {Object} options - */ -proto.putACL = async function putACL(name, acl, options) { - options = options || {}; - options.subres = Object.assign({ acl: '' }, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - options.headers = options.headers || {}; - options.headers['x-oss-object-acl'] = acl; - name = this._objectName(name); - - const params = this._objectRequestParams('PUT', name, options); - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - res: result.res, - }; -}; diff --git a/lib/common/object/putObjectTagging.js b/lib/common/object/putObjectTagging.js deleted file mode 100644 index 159e52df8..000000000 --- 
a/lib/common/object/putObjectTagging.js +++ /dev/null @@ -1,43 +0,0 @@ -const { obj2xml } = require('../utils/obj2xml'); -const { checkObjectTag } = require('../utils/checkObjectTag'); - -const proto = exports; -/** - * putObjectTagging - * @param {String} name - object name - * @param {Object} tag - object tag, eg: `{a: "1", b: "2"}` - * @param {Object} options - */ - -proto.putObjectTagging = async function putObjectTagging(name, tag, options = {}) { - checkObjectTag(tag); - - options.subres = Object.assign({ tagging: '' }, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - name = this._objectName(name); - const params = this._objectRequestParams('PUT', name, options); - params.successStatuses = [ 200 ]; - tag = Object.keys(tag).map(key => ({ - Key: key, - Value: tag[key], - })); - - const paramXMLObj = { - Tagging: { - TagSet: { - Tag: tag, - }, - }, - }; - - params.mime = 'xml'; - params.content = obj2xml(paramXMLObj); - - const result = await this.request(params); - return { - res: result.res, - status: result.status, - }; -}; diff --git a/lib/common/object/putSymlink.js b/lib/common/object/putSymlink.js deleted file mode 100644 index 1574dfdc3..000000000 --- a/lib/common/object/putSymlink.js +++ /dev/null @@ -1,33 +0,0 @@ -const proto = exports; -/** - * putSymlink - * @param {String} name - object name - * @param {String} targetName - target name - * @param {Object} options - * @param {{res}} - */ - -proto.putSymlink = async function putSymlink(name, targetName, options) { - options = options || {}; - options.headers = options.headers || {}; - targetName = this._escape(this._objectName(targetName)); - this._convertMetaToHeaders(options.meta, options.headers); - options.headers['x-oss-symlink-target'] = targetName; - options.subres = Object.assign({ symlink: '' }, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - - if (options.storageClass) { - 
options.headers['x-oss-storage-class'] = options.storageClass; - } - - name = this._objectName(name); - const params = this._objectRequestParams('PUT', name, options); - - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; diff --git a/lib/common/object/signatureUrl.js b/lib/common/object/signatureUrl.js deleted file mode 100644 index b61b7b253..000000000 --- a/lib/common/object/signatureUrl.js +++ /dev/null @@ -1,46 +0,0 @@ -const urlutil = require('url'); -const utility = require('utility'); -const copy = require('copy-to'); -const signHelper = require('../../common/signUtils'); -const { isIP } = require('../utils/isIP'); - -const proto = exports; - -/** - * signatureUrl - * @deprecated will be deprecated in 7.x - * @param {String} name object name - * @param {Object} options options - */ -proto.signatureUrl = function signatureUrl(name, options) { - if (isIP(this.options.endpoint.hostname)) { - throw new Error('can not get the object URL when endpoint is IP'); - } - options = options || {}; - name = this._objectName(name); - options.method = options.method || 'GET'; - const expires = utility.timestamp() + (options.expires || 1800); - const params = { - bucket: this.options.bucket, - object: name, - }; - - const resource = this._getResource(params); - - if (this.options.stsToken) { - options['security-token'] = this.options.stsToken; - } - - const signRes = signHelper._signatureForURL(this.options.accessKeySecret, options, resource, expires); - - const url = urlutil.parse(this._getReqUrl(params)); - url.query = { - OSSAccessKeyId: this.options.accessKeyId, - Expires: expires, - Signature: signRes.Signature, - }; - - copy(signRes.subResource).to(url.query); - - return url.format(); -}; diff --git a/lib/common/parallel.js b/lib/common/parallel.js deleted file mode 100644 index 1b249f381..000000000 --- a/lib/common/parallel.js +++ /dev/null @@ -1,173 +0,0 @@ -const proto = exports; - 
-proto._parallelNode = async function _parallelNode(todo, parallel, fn, sourceData) { - const that = this; - // upload in parallel - const jobErr = []; - let jobs = []; - const tempBatch = todo.length / parallel; - const remainder = todo.length % parallel; - const batch = remainder === 0 ? tempBatch : ((todo.length - remainder) / parallel) + 1; - let taskIndex = 1; - for (let i = 0; i < todo.length; i++) { - if (that.isCancel()) { - break; - } - - if (sourceData) { - jobs.push(fn(that, todo[i], sourceData)); - } else { - jobs.push(fn(that, todo[i])); - } - - if (jobs.length === parallel || (taskIndex === batch && i === (todo.length - 1))) { - try { - taskIndex += 1; - /* eslint no-await-in-loop: [0] */ - await Promise.all(jobs); - } catch (err) { - jobErr.push(err); - } - jobs = []; - } - } - - return jobErr; -}; - -proto._parallel = function _parallel(todo, parallel, jobPromise) { - const that = this; - return new Promise(resolve => { - const _jobErr = []; - if (parallel <= 0 || !todo) { - resolve(_jobErr); - return; - } - - function onlyOnce(fn) { - return function(...args) { - if (fn === null) throw new Error('Callback was already called.'); - const callFn = fn; - fn = null; - callFn.apply(this, args); - }; - } - - function createArrayIterator(coll) { - let i = -1; - const len = coll.length; - return function next() { - return (++i < len && !that.isCancel()) ? 
{ value: coll[i], key: i } : null; - }; - } - - const nextElem = createArrayIterator(todo); - let done = false; - let running = 0; - let looping = false; - - function iterateeCallback(err) { - running -= 1; - if (err) { - done = true; - _jobErr.push(err); - resolve(_jobErr); - } else if (done && running <= 0) { - done = true; - resolve(_jobErr); - } else if (!looping) { - /* eslint no-use-before-define: [0] */ - if (that.isCancel()) { - resolve(_jobErr); - } else { - replenish(); - } - } - } - - function iteratee(value, callback) { - jobPromise(value).then(result => { - callback(null, result); - }).catch(err => { - callback(err); - }); - } - - function replenish() { - looping = true; - while (running < parallel && !done && !that.isCancel()) { - const elem = nextElem(); - if (elem === null || _jobErr.length > 0) { - done = true; - if (running <= 0) { - resolve(_jobErr); - } - return; - } - running += 1; - iteratee(elem.value, onlyOnce(iterateeCallback)); - } - looping = false; - } - - replenish(); - }); -}; - -/** - * cancel operation, now can use with multipartUpload - * @param {Object} abort - * {String} anort.name object key - * {String} anort.uploadId upload id - * {String} anort.options timeout - */ -proto.cancel = function cancel(abort) { - this.options.cancelFlag = true; - - if (Array.isArray(this.multipartUploadStreams)) { - this.multipartUploadStreams.forEach(_ => { - if (_.destroyed === false) { - const err = { - name: 'cancel', - message: 'cancel', - }; - _.destroy(err); - } - }); - } - this.multipartUploadStreams = []; - if (abort) { - this.abortMultipartUpload(abort.name, abort.uploadId, abort.options); - } -}; - -proto.isCancel = function isCancel() { - return this.options.cancelFlag; -}; - -proto.resetCancelFlag = function resetCancelFlag() { - this.options.cancelFlag = false; -}; - -proto._stop = function _stop() { - this.options.cancelFlag = true; -}; - -// cancel is not error , so create an object -proto._makeCancelEvent = function 
_makeCancelEvent() { - const cancelEvent = { - status: 0, - name: 'cancel', - }; - return cancelEvent; -}; - -// abort is not error , so create an object -proto._makeAbortEvent = function _makeAbortEvent() { - const abortEvent = { - status: 0, - name: 'abort', - message: 'upload task has been abort', - }; - return abortEvent; -}; diff --git a/lib/common/signUtils.js b/lib/common/signUtils.js deleted file mode 100644 index cd967f2a2..000000000 --- a/lib/common/signUtils.js +++ /dev/null @@ -1,177 +0,0 @@ -const crypto = require('crypto'); -const is = require('is-type-of'); -const { lowercaseKeyHeader } = require('./utils/lowercaseKeyHeader'); - -/** - * buildCanonicalizedResource - * @param {String} resourcePath resourcePath - * @param {Object} parameters parameters - * @return {String} resource string - */ -exports.buildCanonicalizedResource = function buildCanonicalizedResource(resourcePath, parameters) { - let canonicalizedResource = `${resourcePath}`; - let separatorString = '?'; - - if (is.string(parameters) && parameters.trim() !== '') { - canonicalizedResource += separatorString + parameters; - } else if (is.array(parameters)) { - parameters.sort(); - canonicalizedResource += separatorString + parameters.join('&'); - } else if (parameters) { - const compareFunc = (entry1, entry2) => { - if (entry1[0] > entry2[0]) { - return 1; - } else if (entry1[0] < entry2[0]) { - return -1; - } - return 0; - }; - const processFunc = key => { - canonicalizedResource += separatorString + key; - if (parameters[key] || parameters[key] === 0) { - canonicalizedResource += `=${parameters[key]}`; - } - separatorString = '&'; - }; - Object.keys(parameters).sort(compareFunc).forEach(processFunc); - } - - return canonicalizedResource; -}; - -/** - * @param {String} method method - * @param {String} resourcePath resourcePath - * @param {Object} request request - * @param {String} expires expires - * @return {String} canonicalString - */ -exports.buildCanonicalString = function 
canonicalString(method, resourcePath, request, expires) { - request = request || {}; - const headers = lowercaseKeyHeader(request.headers); - const OSS_PREFIX = 'x-oss-'; - const ossHeaders = []; - const headersToSign = {}; - - let signContent = [ - method.toUpperCase(), - headers['content-md5'] || '', - headers['content-type'], - expires || headers['x-oss-date'], - ]; - - Object.keys(headers).forEach(key => { - const lowerKey = key.toLowerCase(); - if (lowerKey.indexOf(OSS_PREFIX) === 0) { - headersToSign[lowerKey] = String(headers[key]).trim(); - } - }); - - Object.keys(headersToSign).sort().forEach(key => { - ossHeaders.push(`${key}:${headersToSign[key]}`); - }); - - signContent = signContent.concat(ossHeaders); - - signContent.push(this.buildCanonicalizedResource(resourcePath, request.parameters)); - - return signContent.join('\n'); -}; - -/** - * @param {String} accessKeySecret accessKeySecret - * @param {String} canonicalString canonicalString - * @param {String} headerEncoding headerEncoding - */ -exports.computeSignature = function computeSignature(accessKeySecret, canonicalString, headerEncoding = 'utf-8') { - const signature = crypto.createHmac('sha1', accessKeySecret); - return signature.update(Buffer.from(canonicalString, headerEncoding)).digest('base64'); -}; - -/** - * @param {String} accessKeyId accessKeyId - * @param {String} accessKeySecret accessKeySecret - * @param {String} canonicalString canonicalString - * @param {String} headerEncoding headerEncoding - */ -exports.authorization = function authorization(accessKeyId, accessKeySecret, canonicalString, headerEncoding) { - return `OSS ${accessKeyId}:${this.computeSignature(accessKeySecret, canonicalString, headerEncoding)}`; -}; - -/** - * - * @param {String} accessKeySecret accessKeySecret - * @param {Object} options options - * @param {String} resource resource - * @param {Number} expires expires - * @param {String} headerEncoding headerEncoding - */ -exports._signatureForURL = function 
_signatureForURL(accessKeySecret, options = {}, resource, expires, headerEncoding) { - const headers = {}; - const { subResource = {} } = options; - - if (options.process) { - const processKeyword = 'x-oss-process'; - subResource[processKeyword] = options.process; - } - - if (options.trafficLimit) { - const trafficLimitKey = 'x-oss-traffic-limit'; - subResource[trafficLimitKey] = options.trafficLimit; - } - - if (options.response) { - Object.keys(options.response).forEach(k => { - const key = `response-${k.toLowerCase()}`; - subResource[key] = options.response[k]; - }); - } - - Object.keys(options).forEach(key => { - const lowerKey = key.toLowerCase(); - const value = options[key]; - if (lowerKey.indexOf('x-oss-') === 0) { - headers[lowerKey] = value; - } else if (lowerKey.indexOf('content-md5') === 0) { - headers[key] = value; - } else if (lowerKey.indexOf('content-type') === 0) { - headers[key] = value; - } - }); - - if (Object.prototype.hasOwnProperty.call(options, 'security-token')) { - subResource['security-token'] = options['security-token']; - } - - if (Object.prototype.hasOwnProperty.call(options, 'callback')) { - const json = { - callbackUrl: encodeURI(options.callback.url), - callbackBody: options.callback.body, - }; - if (options.callback.host) { - json.callbackHost = options.callback.host; - } - if (options.callback.contentType) { - json.callbackBodyType = options.callback.contentType; - } - subResource.callback = Buffer.from(JSON.stringify(json)).toString('base64'); - - if (options.callback.customValue) { - const callbackVar = {}; - Object.keys(options.callback.customValue).forEach(key => { - callbackVar[`x:${key}`] = options.callback.customValue[key]; - }); - subResource['callback-var'] = Buffer.from(JSON.stringify(callbackVar)).toString('base64'); - } - } - - const canonicalString = this.buildCanonicalString(options.method, resource, { - headers, - parameters: subResource, - }, expires.toString()); - - return { - Signature: 
this.computeSignature(accessKeySecret, canonicalString, headerEncoding), - subResource, - }; -}; diff --git a/lib/common/utils/checkBucketName.d.ts b/lib/common/utils/checkBucketName.d.ts deleted file mode 100644 index 4de489dc7..000000000 --- a/lib/common/utils/checkBucketName.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare const checkBucketName: (name: string, createBucket?: boolean) => void; diff --git a/lib/common/utils/checkBucketTag.js b/lib/common/utils/checkBucketTag.js deleted file mode 100644 index 773bebc5e..000000000 --- a/lib/common/utils/checkBucketTag.js +++ /dev/null @@ -1,48 +0,0 @@ -const { checkValid } = require('./checkValid'); -const { isObject } = require('./isObject'); - -const commonRules = [ - { - validator: value => { - if (typeof value !== 'string') { - throw new Error('the key and value of the tag must be String'); - } - }, - }, -]; -const rules = { - key: [ - ...commonRules, - { - pattern: /^.{1,64}$/, - msg: 'tag key can be a maximum of 64 bytes in length', - }, - { - pattern: /^(?!https*:\/\/|Aliyun)/, - msg: 'tag key can not startsWith: http://, https://, Aliyun', - }, - ], - value: [ - ...commonRules, - { - pattern: /^.{0,128}$/, - msg: 'tag value can be a maximum of 128 bytes in length', - }, - ], -}; - -exports.checkBucketTag = tag => { - if (!isObject(tag)) { - throw new Error('bucket tag must be Object'); - } - const entries = Object.entries(tag); - if (entries.length > 20) { - throw new Error('maximum of 20 tags for a bucket'); - } - const rulesIndexKey = [ 'key', 'value' ]; - entries.forEach(keyValue => { - keyValue.forEach((item, index) => { - checkValid(item, rules[rulesIndexKey[index]]); - }); - }); -}; diff --git a/lib/common/utils/checkConfigValid.d.ts b/lib/common/utils/checkConfigValid.d.ts deleted file mode 100644 index d1bcb3fc0..000000000 --- a/lib/common/utils/checkConfigValid.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare const checkConfigValid: (conf: any, key: 'endpoint' | 'region') => void; diff --git 
a/lib/common/utils/checkConfigValid.js b/lib/common/utils/checkConfigValid.js deleted file mode 100644 index 684cb1525..000000000 --- a/lib/common/utils/checkConfigValid.js +++ /dev/null @@ -1,27 +0,0 @@ -const checkConfigMap = { - endpoint: checkEndpoint, - region: /^[a-zA-Z0-9\-_]+$/, -}; - -function checkEndpoint(endpoint) { - if (typeof endpoint === 'string') { - return /^[a-zA-Z0-9._:/-]+$/.test(endpoint); - } else if (endpoint.host) { - return /^[a-zA-Z0-9._:/-]+$/.test(endpoint.host); - } - return false; -} - -exports.checkConfigValid = (conf, key) => { - if (checkConfigMap[key]) { - let isConfigValid = true; - if (checkConfigMap[key] instanceof Function) { - isConfigValid = checkConfigMap[key](conf); - } else { - isConfigValid = checkConfigMap[key].test(conf); - } - if (!isConfigValid) { - throw new Error(`The ${key} must be conform to the specifications`); - } - } -}; diff --git a/lib/common/utils/checkObjectTag.js b/lib/common/utils/checkObjectTag.js deleted file mode 100644 index 5fd30b911..000000000 --- a/lib/common/utils/checkObjectTag.js +++ /dev/null @@ -1,51 +0,0 @@ -const { checkValid } = require('./checkValid'); -const { isObject } = require('./isObject'); - -const commonRules = [ - { - validator: value => { - if (typeof value !== 'string') { - throw new Error('the key and value of the tag must be String'); - } - }, - }, - { - pattern: /^[a-zA-Z0-9 +-=._:/]+$/, - msg: 'tag can contain letters, numbers, spaces, and the following symbols: plus sign (+), hyphen (-), equal sign (=), period (.), underscore (_), colon (:), and forward slash (/)', - }, -]; - -const rules = { - key: [ - ...commonRules, - { - pattern: /^.{1,128}$/, - msg: 'tag key can be a maximum of 128 bytes in length', - }, - ], - value: [ - ...commonRules, - { - pattern: /^.{0,256}$/, - msg: 'tag value can be a maximum of 256 bytes in length', - }, - ], -}; - -function checkObjectTag(tag) { - if (!isObject(tag)) { - throw new Error('tag must be Object'); - } - const entries = 
Object.entries(tag); - if (entries.length > 10) { - throw new Error('maximum of 10 tags for a object'); - } - const rulesIndexKey = [ 'key', 'value' ]; - entries.forEach(keyValue => { - keyValue.forEach((item, index) => { - checkValid(item, rules[rulesIndexKey[index]]); - }); - }); -} - -exports.checkObjectTag = checkObjectTag; diff --git a/lib/common/utils/checkValid.js b/lib/common/utils/checkValid.js deleted file mode 100644 index 64a8497d4..000000000 --- a/lib/common/utils/checkValid.js +++ /dev/null @@ -1,11 +0,0 @@ -function checkValid(_value, _rules) { - _rules.forEach(rule => { - if (rule.validator) { - rule.validator(_value); - } else if (rule.pattern && !rule.pattern.test(_value)) { - throw new Error(rule.msg); - } - }); -} - -exports.checkValid = checkValid; diff --git a/lib/common/utils/createRequest.d.ts b/lib/common/utils/createRequest.d.ts deleted file mode 100644 index 666438c19..000000000 --- a/lib/common/utils/createRequest.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -interface ReqParams { - [propName: string]: any; -} -export declare function createRequest(this: any, params: any): { - url: any; - params: ReqParams; -}; -export {}; diff --git a/lib/common/utils/createRequest.js b/lib/common/utils/createRequest.js deleted file mode 100644 index 0aac4c5ef..000000000 --- a/lib/common/utils/createRequest.js +++ /dev/null @@ -1,92 +0,0 @@ -const debug = require('util').debuglog('oss-client:createRequest'); -const crypto = require('crypto'); -const mime = require('mime'); -const dateFormat = require('dateformat'); -const copy = require('copy-to'); -const path = require('path'); -const { encoder } = require('./encoder'); -const { isIP } = require('./isIP'); -const { setRegion } = require('./setRegion'); -const { getReqUrl } = require('../client/getReqUrl'); - -function getHeader(headers, name) { - return headers[name] || headers[name.toLowerCase()]; -} - -function delHeader(headers, name) { - delete headers[name]; - delete headers[name.toLowerCase()]; -} - 
-function createRequest(params) { - let date = new Date(); - if (this.options.amendTimeSkewed) { - date = +new Date() + this.options.amendTimeSkewed; - } - const headers = { - 'x-oss-date': dateFormat(date, "UTC:ddd, dd mmm yyyy HH:MM:ss 'GMT'"), - }; - headers['User-Agent'] = this.userAgent; - if (this.options.isRequestPay) { - Object.assign(headers, { 'x-oss-request-payer': 'requester' }); - } - if (this.options.stsToken) { - headers['x-oss-security-token'] = this.options.stsToken; - } - copy(params.headers).to(headers); - if (!getHeader(headers, 'Content-Type')) { - if (params.mime && params.mime.indexOf('/') > 0) { - headers['Content-Type'] = params.mime; - } else { - headers['Content-Type'] = mime.getType(params.mime || path.extname(params.object || '')); - } - } - if (!getHeader(headers, 'Content-Type')) { - delHeader(headers, 'Content-Type'); - } - if (params.content) { - if (!params.disabledMD5) { - if (!params.headers || !params.headers['Content-MD5']) { - headers['Content-MD5'] = crypto.createHash('md5').update(Buffer.from(params.content, 'utf8')).digest('base64'); - } else { - headers['Content-MD5'] = params.headers['Content-MD5']; - } - } - if (!headers['Content-Length']) { - headers['Content-Length'] = params.content.length; - } - } - const { hasOwnProperty } = Object.prototype; - for (const k in headers) { - if (headers[k] && hasOwnProperty.call(headers, k)) { - headers[k] = encoder(String(headers[k]), this.options.headerEncoding); - } - } - const authResource = this._getResource(params); - headers.authorization = this.authorization(params.method, authResource, params.subres, headers, this.options.headerEncoding); - // const url = this._getReqUrl(params); - if (isIP(this.options.endpoint.hostname)) { - const { region, internal, secure } = this.options; - const hostInfo = setRegion(region, internal, secure); - headers.host = `${params.bucket}.${hostInfo.host}`; - } - const url = getReqUrl.bind(this)(params); - debug('request %s %s, with headers %j, 
!!stream: %s', params.method, url, headers, !!params.stream); - const timeout = params.timeout || this.options.timeout; - const reqParams = { - method: params.method, - content: params.content, - stream: params.stream, - headers, - timeout, - writeStream: params.writeStream, - customResponse: params.customResponse, - timing: true, - ctx: params.ctx || this.ctx, - }; - return { - url, - params: reqParams, - }; -} -exports.createRequest = createRequest; diff --git a/lib/common/utils/dataFix.d.ts b/lib/common/utils/dataFix.d.ts deleted file mode 100644 index da522adc2..000000000 --- a/lib/common/utils/dataFix.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -interface Rename { - [propName: string]: string; -} -interface Config { - lowerFirst?: boolean; - rename?: Rename; - remove?: string[]; - camel?: string[]; - bool?: string[]; -} -export declare function dataFix(o: object, conf: Config, finalKill?: Function): typeof dataFix | undefined; -export {}; diff --git a/lib/common/utils/dataFix.js b/lib/common/utils/dataFix.js deleted file mode 100644 index b9f4b9541..000000000 --- a/lib/common/utils/dataFix.js +++ /dev/null @@ -1,58 +0,0 @@ -'use strict'; -Object.defineProperty(exports, '__esModule', { value: true }); -exports.dataFix = void 0; -const isObject_1 = require('./isObject'); -const TRUE = [ 'true', 'TRUE', '1', 1 ]; -const FALSE = [ 'false', 'FALSE', '0', 0 ]; -function dataFix(o, conf, finalKill) { - if (!isObject_1.isObject(o)) { return; } - const { remove = [], rename = {}, camel = [], bool = [], lowerFirst = false } = conf; - // 删除不需要的数据 - remove.forEach(v => delete o[v]); - // 重命名 - Object.entries(rename).forEach(v => { - if (!o[v[0]]) { return; } - if (o[v[1]]) { return; } - o[v[1]] = o[v[0]]; - delete o[v[0]]; - }); - // 驼峰化 - camel.forEach(v => { - if (!o[v]) { return; } - const afterKey = v - .replace(/^(.)/, $0 => $0.toLowerCase()) - .replace(/-(\w)/g, (_, $1) => $1.toUpperCase()); - if (o[afterKey]) { return; } - o[afterKey] = o[v]; - // todo 暂时兼容以前数据,不做删除 - // 
delete o[v]; - }); - // 转换值为布尔值 - bool.forEach(v => { - o[v] = fixBool(o[v]); - }); - // finalKill - if (typeof finalKill === 'function') { - finalKill(o); - } - // 首字母转小写 - fixLowerFirst(o, lowerFirst); - return dataFix; -} -exports.dataFix = dataFix; -function fixBool(value) { - if (!value) { return false; } - if (TRUE.includes(value)) { return true; } - return FALSE.includes(value) ? false : value; -} -function fixLowerFirst(o, lowerFirst) { - if (lowerFirst) { - Object.keys(o).forEach(key => { - const lowerK = key.replace(/^\w/, match => match.toLowerCase()); - if (typeof o[lowerK] === 'undefined') { - o[lowerK] = o[key]; - delete o[key]; - } - }); - } -} diff --git a/lib/common/utils/dataFix.ts b/lib/common/utils/dataFix.ts deleted file mode 100644 index 8c1b73342..000000000 --- a/lib/common/utils/dataFix.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { isObject } from './isObject'; - -interface Rename { - [propName: string]: string; -} - -interface Config { - lowerFirst?: boolean; - rename?: Rename; - remove?: string[]; - camel?: string[]; - bool?: string[]; -} - -const TRUE = ['true', 'TRUE', '1', 1]; -const FALSE = ['false', 'FALSE', '0', 0]; - -export function dataFix(o: object, conf: Config, finalKill?: Function) { - if (!isObject(o)) return; - - const { - remove = [], - rename = {}, - camel = [], - bool = [], - lowerFirst = false, - } = conf; - - // 删除不需要的数据 - remove.forEach(v => delete o[v]); - - // 重命名 - Object.entries(rename).forEach(v => { - if (!o[v[0]]) return; - if (o[v[1]]) return; - o[v[1]] = o[v[0]]; - delete o[v[0]]; - }); - - // 驼峰化 - camel.forEach(v => { - if (!o[v]) return; - const afterKey = v - .replace(/^(.)/, $0 => $0.toLowerCase()) - .replace(/-(\w)/g, (_, $1) => $1.toUpperCase()); - if (o[afterKey]) return; - o[afterKey] = o[v]; - // todo 暂时兼容以前数据,不做删除 - // delete o[v]; - }); - - // 转换值为布尔值 - bool.forEach(v => { - o[v] = fixBool(o[v]); - }); - - // finalKill - if (typeof finalKill === 'function') { - finalKill(o); - } - - // 首字母转小写 - 
fixLowerFirst(o, lowerFirst); - - return dataFix; -} - -function fixBool(value) { - if (!value) return false; - - if (TRUE.includes(value)) return true; - - return FALSE.includes(value) ? false : value; -} - -function fixLowerFirst(o, lowerFirst) { - if (lowerFirst) { - Object.keys(o).forEach(key => { - const lowerK = key.replace(/^\w/, match => match.toLowerCase()); - if (typeof o[lowerK] === 'undefined') { - o[lowerK] = o[key]; - delete o[key]; - } - }); - } -} diff --git a/lib/common/utils/deepCopy.js b/lib/common/utils/deepCopy.js deleted file mode 100644 index 873beaa94..000000000 --- a/lib/common/utils/deepCopy.js +++ /dev/null @@ -1,36 +0,0 @@ -exports.deepCopy = obj => { - if (obj === null || typeof obj !== 'object') { - return obj; - } - if (Buffer.isBuffer(obj)) { - return obj.slice(); - } - const copy = Array.isArray(obj) ? [] : {}; - Object.keys(obj).forEach(key => { - copy[key] = exports.deepCopy(obj[key]); - }); - return copy; -}; - -exports.deepCopyWith = (obj, customizer) => { - function deepCopyWithHelper(value, innerKey, innerObject) { - const result = customizer(value, innerKey, innerObject); - if (result !== undefined) { return result; } - if (value === null || typeof value !== 'object') { - return value; - } - if (Buffer.isBuffer(value)) { - return value.slice(); - } - const copy = Array.isArray(value) ? 
[] : {}; - Object.keys(value).forEach(k => { - copy[k] = deepCopyWithHelper(value[k], k, value); - }); - return copy; - } - if (customizer) { - return deepCopyWithHelper(obj, '', null); - } - - return exports.deepCopy(obj); -}; diff --git a/lib/common/utils/encoder.js b/lib/common/utils/encoder.js deleted file mode 100644 index abe27eef1..000000000 --- a/lib/common/utils/encoder.js +++ /dev/null @@ -1,6 +0,0 @@ -function encoder(str, encoding = 'utf-8') { - if (encoding === 'utf-8') { return str; } - return Buffer.from(str).toString('latin1'); -} - -exports.encoder = encoder; diff --git a/lib/common/utils/formatInventoryConfig.d.ts b/lib/common/utils/formatInventoryConfig.d.ts deleted file mode 100644 index 2210338ae..000000000 --- a/lib/common/utils/formatInventoryConfig.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function formatInventoryConfig(inventoryConfig: any, toArray?: boolean): any; diff --git a/lib/common/utils/formatInventoryConfig.js b/lib/common/utils/formatInventoryConfig.js deleted file mode 100644 index 04f2342fb..000000000 --- a/lib/common/utils/formatInventoryConfig.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; -Object.defineProperty(exports, '__esModule', { value: true }); -exports.formatInventoryConfig = void 0; -const dataFix_1 = require('../utils/dataFix'); -const isObject_1 = require('../utils/isObject'); -const formatObjKey_1 = require('../utils/formatObjKey'); -function formatInventoryConfig(inventoryConfig, toArray = false) { - if (toArray && isObject_1.isObject(inventoryConfig)) { inventoryConfig = [ inventoryConfig ]; } - if (Array.isArray(inventoryConfig)) { - inventoryConfig = inventoryConfig.map(formatFn); - } else { - inventoryConfig = formatFn(inventoryConfig); - } - return inventoryConfig; -} -exports.formatInventoryConfig = formatInventoryConfig; -function formatFn(_) { - dataFix_1.dataFix(_, { bool: [ 'IsEnabled' ] }, conf => { - let _a, - _b; - // prefix - conf.prefix = conf.Filter.Prefix; - delete conf.Filter; - // 
OSSBucketDestination - conf.OSSBucketDestination = conf.Destination.OSSBucketDestination; - // OSSBucketDestination.rolename - conf.OSSBucketDestination.rolename = conf.OSSBucketDestination.RoleArn.replace(/.*\//, ''); - delete conf.OSSBucketDestination.RoleArn; - // OSSBucketDestination.bucket - conf.OSSBucketDestination.bucket = conf.OSSBucketDestination.Bucket.replace(/.*:::/, ''); - delete conf.OSSBucketDestination.Bucket; - delete conf.Destination; - // frequency - conf.frequency = conf.Schedule.Frequency; - delete conf.Schedule.Frequency; - // optionalFields - if (((_a = conf === null || conf === void 0 ? void 0 : conf.OptionalFields) === null || _a === void 0 ? void 0 : _a.Field) && !Array.isArray((_b = conf.OptionalFields) === null || _b === void 0 ? void 0 : _b.Field)) { conf.OptionalFields.Field = [ conf.OptionalFields.Field ]; } - }); - // firstLowerCase - _ = formatObjKey_1.formatObjKey(_, 'firstLowerCase', { exclude: [ 'OSSBucketDestination', 'SSE-OSS', 'SSE-KMS' ] }); - return _; -} diff --git a/lib/common/utils/formatInventoryConfig.ts b/lib/common/utils/formatInventoryConfig.ts deleted file mode 100644 index 827cdd9bf..000000000 --- a/lib/common/utils/formatInventoryConfig.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { dataFix } from '../utils/dataFix'; -import { isObject } from '../utils/isObject'; -import { formatObjKey } from '../utils/formatObjKey'; - -export function formatInventoryConfig(inventoryConfig, toArray = false) { - if (toArray && isObject(inventoryConfig)) inventoryConfig = [inventoryConfig]; - - if (Array.isArray(inventoryConfig)) { - inventoryConfig = inventoryConfig.map(formatFn); - } else { - inventoryConfig = formatFn(inventoryConfig); - } - return inventoryConfig; -} - -function formatFn(_) { - dataFix(_, { bool: ['IsEnabled'] }, conf => { - // prefix - conf.prefix = conf.Filter.Prefix; - delete conf.Filter; - // OSSBucketDestination - conf.OSSBucketDestination = conf.Destination.OSSBucketDestination; - // 
OSSBucketDestination.rolename - conf.OSSBucketDestination.rolename = conf.OSSBucketDestination.RoleArn.replace(/.*\//, ''); - delete conf.OSSBucketDestination.RoleArn; - // OSSBucketDestination.bucket - conf.OSSBucketDestination.bucket = conf.OSSBucketDestination.Bucket.replace(/.*:::/, ''); - delete conf.OSSBucketDestination.Bucket; - delete conf.Destination; - // frequency - conf.frequency = conf.Schedule.Frequency; - delete conf.Schedule.Frequency; - // optionalFields - if (conf?.OptionalFields?.Field && !Array.isArray(conf.OptionalFields?.Field)) conf.OptionalFields.Field = [conf.OptionalFields.Field]; - }); - // firstLowerCase - _ = formatObjKey(_, 'firstLowerCase', { exclude: ['OSSBucketDestination', 'SSE-OSS', 'SSE-KMS'] }); - return _; -} diff --git a/lib/common/utils/formatObjKey.d.ts b/lib/common/utils/formatObjKey.d.ts deleted file mode 100644 index 4e370577e..000000000 --- a/lib/common/utils/formatObjKey.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -interface Config { - exclude?: string[]; -} -declare type FormatObjKeyType = 'firstUpperCase' | 'firstLowerCase'; -export declare function formatObjKey(obj: any, type: FormatObjKeyType, options?: Config): any; -export {}; diff --git a/lib/common/utils/formatObjKey.js b/lib/common/utils/formatObjKey.js deleted file mode 100644 index 05d04b73c..000000000 --- a/lib/common/utils/formatObjKey.js +++ /dev/null @@ -1,32 +0,0 @@ -'use strict'; -Object.defineProperty(exports, '__esModule', { value: true }); -exports.formatObjKey = void 0; -function formatObjKey(obj, type, options) { - if (obj === null || typeof obj !== 'object') { - return obj; - } - let o; - if (Array.isArray(obj)) { - o = []; - for (let i = 0; i < obj.length; i++) { - o.push(formatObjKey(obj[i], type, options)); - } - } else { - o = {}; - Object.keys(obj).forEach(key => { - o[handelFormat(key, type, options)] = formatObjKey(obj[key], type, options); - }); - } - return o; -} -exports.formatObjKey = formatObjKey; -function handelFormat(key, type, options) { - 
let _a; - if (options && ((_a = options.exclude) === null || _a === void 0 ? void 0 : _a.includes(key))) { return key; } - if (type === 'firstUpperCase') { - key = key.replace(/^./, _ => _.toUpperCase()); - } else if (type === 'firstLowerCase') { - key = key.replace(/^./, _ => _.toLowerCase()); - } - return key; -} diff --git a/lib/common/utils/formatObjKey.ts b/lib/common/utils/formatObjKey.ts deleted file mode 100644 index ac3db57a9..000000000 --- a/lib/common/utils/formatObjKey.ts +++ /dev/null @@ -1,36 +0,0 @@ - -interface Config { - exclude?: string[]; -} - -type FormatObjKeyType = 'firstUpperCase' | 'firstLowerCase'; - -export function formatObjKey(obj: any, type: FormatObjKeyType, options?: Config) { - if (obj === null || typeof obj !== 'object') { - return obj; - } - - let o:any; - if (Array.isArray(obj)) { - o = []; - for (let i = 0; i < obj.length; i++) { - o.push(formatObjKey(obj[i], type, options)); - } - } else { - o = {}; - Object.keys(obj).forEach((key) => { - o[handelFormat(key, type, options)] = formatObjKey(obj[key], type, options); - }); - } - return o; -} - -function handelFormat(key: string, type: FormatObjKeyType, options?: Config) { - if (options && options.exclude?.includes(key)) return key; - if (type === 'firstUpperCase') { - key = key.replace(/^./, (_: string) => _.toUpperCase()); - } else if (type === 'firstLowerCase') { - key = key.replace(/^./, (_: string) => _.toLowerCase()); - } - return key; -} diff --git a/lib/common/utils/formatTag.js b/lib/common/utils/formatTag.js deleted file mode 100644 index 3234e53d1..000000000 --- a/lib/common/utils/formatTag.js +++ /dev/null @@ -1,19 +0,0 @@ -const { isObject } = require('./isObject'); - -function formatTag(obj) { - if (obj.Tagging !== undefined) { - obj = obj.Tagging.TagSet.Tag; - } else if (obj.TagSet !== undefined) { - obj = obj.TagSet.Tag; - } else if (obj.Tag !== undefined) { - obj = obj.Tag; - } - obj = obj && isObject(obj) ? 
[ obj ] : obj || []; - const tag = {}; - obj.forEach(item => { - tag[item.Key] = item.Value; - }); - return tag; -} - -exports.formatTag = formatTag; diff --git a/lib/common/utils/getStrBytesCount.js b/lib/common/utils/getStrBytesCount.js deleted file mode 100644 index 578db7586..000000000 --- a/lib/common/utils/getStrBytesCount.js +++ /dev/null @@ -1,14 +0,0 @@ -function getStrBytesCount(str) { - let bytesCount = 0; - for (let i = 0; i < str.length; i++) { - const c = str.charAt(i); - if (/^[\u00-\uff]$/.test(c)) { - bytesCount += 1; - } else { - bytesCount += 2; - } - } - return bytesCount; -} - -exports.getStrBytesCount = getStrBytesCount; diff --git a/lib/common/utils/isFunction.js b/lib/common/utils/isFunction.js deleted file mode 100644 index 78b531220..000000000 --- a/lib/common/utils/isFunction.js +++ /dev/null @@ -1,3 +0,0 @@ -exports.isFunction = v => { - return typeof v === 'function'; -}; diff --git a/lib/common/utils/isIP.js b/lib/common/utils/isIP.js deleted file mode 100644 index ecd8af777..000000000 --- a/lib/common/utils/isIP.js +++ /dev/null @@ -1,10 +0,0 @@ -/** - * Judge isIP include ipv4 or ipv6 - * @param {String} host host - * @return {Array} the multipart uploads - */ -exports.isIP = host => { - const ipv4Regex = /^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}$/; - const ipv6Regex = 
/^\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*$/; - return ipv4Regex.test(host) || ipv6Regex.test(host); -}; diff --git a/lib/common/utils/isObject.d.ts b/lib/common/utils/isObject.d.ts deleted file mode 100644 index ef86365a9..000000000 --- a/lib/common/utils/isObject.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare const isObject: (obj: any) => boolean; diff --git a/lib/common/utils/isObject.js b/lib/common/utils/isObject.js deleted file mode 100644 index 57ca615e0..000000000 --- a/lib/common/utils/isObject.js +++ /dev/null @@ -1,3 +0,0 @@ -exports.isObject = obj => { - return Object.prototype.toString.call(obj) === '[object Object]'; -}; diff --git a/lib/common/utils/lowercaseKeyHeader.d.ts b/lib/common/utils/lowercaseKeyHeader.d.ts deleted file mode 100644 index bcb2902a1..000000000 --- a/lib/common/utils/lowercaseKeyHeader.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function lowercaseKeyHeader(headers: any): {}; diff --git 
a/lib/common/utils/lowercaseKeyHeader.js b/lib/common/utils/lowercaseKeyHeader.js deleted file mode 100644 index cb198085e..000000000 --- a/lib/common/utils/lowercaseKeyHeader.js +++ /dev/null @@ -1,13 +0,0 @@ -const { isObject } = require('./isObject'); - -function lowercaseKeyHeader(headers) { - const lowercaseHeader = {}; - if (isObject(headers)) { - Object.keys(headers).forEach(key => { - lowercaseHeader[key.toLowerCase()] = headers[key]; - }); - } - return lowercaseHeader; -} - -exports.lowercaseKeyHeader = lowercaseKeyHeader; diff --git a/lib/common/utils/obj2xml.d.ts b/lib/common/utils/obj2xml.d.ts deleted file mode 100644 index 5ea5260c1..000000000 --- a/lib/common/utils/obj2xml.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function obj2xml(obj: any, options?: any): string; diff --git a/lib/common/utils/obj2xml.js b/lib/common/utils/obj2xml.js deleted file mode 100644 index 4c8a8bb45..000000000 --- a/lib/common/utils/obj2xml.js +++ /dev/null @@ -1,41 +0,0 @@ -const { formatObjKey } = require('./formatObjKey'); - -function type(params) { - return Object.prototype.toString - .call(params) - .replace(/(.*? 
|])/g, '') - .toLowerCase(); -} - -function obj2xml(obj, options) { - let s = ''; - if (options && options.headers) { - s = '\n'; - } - if (options && options.firstUpperCase) { - obj = formatObjKey(obj, 'firstUpperCase'); - } - if (type(obj) === 'object') { - Object.keys(obj).forEach(key => { - // filter undefined or null - if (type(obj[key]) !== 'undefined' && type(obj[key]) !== 'null') { - if (type(obj[key]) === 'string' || type(obj[key]) === 'number') { - s += `<${key}>${obj[key]}`; - } else if (type(obj[key]) === 'object') { - s += `<${key}>${obj2xml(obj[key])}`; - } else if (type(obj[key]) === 'array') { - s += obj[key] - .map(keyChild => `<${key}>${obj2xml(keyChild)}`) - .join(''); - } else { - s += `<${key}>${obj[key].toString()}`; - } - } - }); - } else { - s += obj.toString(); - } - return s; -} - -exports.obj2xml = obj2xml; diff --git a/lib/common/utils/omit.d.ts b/lib/common/utils/omit.d.ts deleted file mode 100644 index b4568cff6..000000000 --- a/lib/common/utils/omit.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function omit(originalObject: {}, keysToOmit: string[]): {}; diff --git a/lib/common/utils/omit.js b/lib/common/utils/omit.js deleted file mode 100644 index c74cadab8..000000000 --- a/lib/common/utils/omit.js +++ /dev/null @@ -1,9 +0,0 @@ -function omit(originalObject, keysToOmit) { - const cloneObject = Object.assign({}, originalObject); - for (const path of keysToOmit) { - delete cloneObject[path]; - } - return cloneObject; -} - -exports.omit = omit; diff --git a/lib/common/utils/policy2Str.d.ts b/lib/common/utils/policy2Str.d.ts deleted file mode 100644 index 27cf8b53f..000000000 --- a/lib/common/utils/policy2Str.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function policy2Str(policy: string | object): any; diff --git a/lib/common/utils/policy2Str.js b/lib/common/utils/policy2Str.js deleted file mode 100644 index f7699cad9..000000000 --- a/lib/common/utils/policy2Str.js +++ /dev/null @@ -1,17 +0,0 @@ -function policy2Str(policy) { - 
let policyStr; - if (policy) { - if (typeof policy === 'string') { - try { - policyStr = JSON.stringify(JSON.parse(policy)); - } catch (err) { - throw new Error(`Policy string is not a valid JSON: ${err.message}`); - } - } else { - policyStr = JSON.stringify(policy); - } - } - return policyStr; -} - -exports.policy2Str = policy2Str; diff --git a/lib/common/utils/retry.d.ts b/lib/common/utils/retry.d.ts deleted file mode 100644 index 27eff2577..000000000 --- a/lib/common/utils/retry.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function retry(this: any, func: Function, retryMax: number, config?: any): (...arg: any[]) => Promise; diff --git a/lib/common/utils/retry.js b/lib/common/utils/retry.js deleted file mode 100644 index f13af381e..000000000 --- a/lib/common/utils/retry.js +++ /dev/null @@ -1,27 +0,0 @@ -function retry(func, retryMax, config = {}) { - let retryNum = 0; - const { retryDelay = 500, errorHandler = () => true } = config; - const funcR = (...arg) => { - return new Promise((resolve, reject) => { - func(...arg) - .then(result => { - retryNum = 0; - resolve(result); - }) - .catch(err => { - if (retryNum < retryMax && errorHandler(err)) { - retryNum++; - setTimeout(() => { - resolve(funcR(...arg)); - }, retryDelay); - } else { - retryNum = 0; - reject(err); - } - }); - }); - }; - return funcR; -} - -exports.retry = retry; diff --git a/lib/common/utils/setRegion.d.ts b/lib/common/utils/setRegion.d.ts deleted file mode 100644 index 89ea7d062..000000000 --- a/lib/common/utils/setRegion.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import urlutil from 'url'; -export declare function setRegion(region: string, internal?: boolean, secure?: boolean): urlutil.UrlWithStringQuery; diff --git a/lib/common/utils/setRegion.js b/lib/common/utils/setRegion.js deleted file mode 100644 index 2a7eefa2f..000000000 --- a/lib/common/utils/setRegion.js +++ /dev/null @@ -1,16 +0,0 @@ -const { parse } = require('url'); -const { checkConfigValid } = require('./checkConfigValid'); - 
-function setRegion(region, internal = false, secure = false) { - checkConfigValid(region, 'region'); - const protocol = secure ? 'https://' : 'http://'; - let suffix = internal ? '-internal.aliyuncs.com' : '.aliyuncs.com'; - const prefix = 'vpc100-oss-cn-'; - // aliyun VPC region: https://help.aliyun.com/knowledge_detail/38740.html - if (region.substr(0, prefix.length) === prefix) { - suffix = '.aliyuncs.com'; - } - return parse(protocol + region + suffix); -} - -exports.setRegion = setRegion; diff --git a/lib/common/utils/setSTSToken.d.ts b/lib/common/utils/setSTSToken.d.ts deleted file mode 100644 index 210d77554..000000000 --- a/lib/common/utils/setSTSToken.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export declare function setSTSToken(this: any): Promise; -export declare function checkCredentials(obj: any): void; diff --git a/lib/common/utils/setSTSToken.js b/lib/common/utils/setSTSToken.js deleted file mode 100644 index 6a3c5f9e1..000000000 --- a/lib/common/utils/setSTSToken.js +++ /dev/null @@ -1,35 +0,0 @@ -const { formatObjKey } = require('./formatObjKey'); - -async function setSTSToken() { - if (!this.options) { this.options = {}; } - const now = new Date(); - if (this.stsTokenFreshTime) { - if (+now - this.stsTokenFreshTime >= this.options.refreshSTSTokenInterval) { - this.stsTokenFreshTime = now; - let credentials = await this.options.refreshSTSToken(); - credentials = formatObjKey(credentials, 'firstLowerCase'); - if (credentials.securityToken) { - credentials.stsToken = credentials.securityToken; - } - checkCredentials(credentials); - Object.assign(this.options, credentials); - } - } else { - this.stsTokenFreshTime = now; - } - return null; -} - -exports.setSTSToken = setSTSToken; - -function checkCredentials(obj) { - const stsTokenKey = [ 'accessKeySecret', 'accessKeyId', 'stsToken' ]; - const objKeys = Object.keys(obj); - stsTokenKey.forEach(_ => { - if (!objKeys.find(key => key === _)) { - throw Error(`refreshSTSToken must return contains ${_}`); - } - }); 
-} - -exports.checkCredentials = checkCredentials; diff --git a/lib/image.js b/lib/image.js deleted file mode 100644 index 6829e8210..000000000 --- a/lib/image.js +++ /dev/null @@ -1,125 +0,0 @@ -module.exports = function(OssClient) { - function ImageClient(options) { - if (!(this instanceof ImageClient)) { - return new ImageClient(options); - } - if (!options.bucket) { - throw new Error('require bucket for image service instance'); - } - if (!options.imageHost) { - throw new Error('require imageHost for image service instance'); - } - - options.endpoint = options.imageHost; - this.ossClient = new OssClient(options); - this.ossClient.options.imageHost = options.imageHost; - } - - /** - * Image operations - */ - - ImageClient.prototype.get = async function get(name, file, options) { - return await this.ossClient.get(name, file, options); - }; - - ImageClient.prototype.getStream = async function getStream(name, options) { - return await this.ossClient.getStream(name, options); - }; - - ImageClient.prototype.getExif = async function getExif(name, options) { - const params = this.ossClient._objectRequestParams('GET', `${name}@exif`, options); - params.successStatuses = [ 200 ]; - - let result = await this.ossClient.request(params); - result = await this._parseResponse(result); - return { - res: result.res, - data: result.data, - }; - }; - - ImageClient.prototype.getInfo = async function getInfo(name, options) { - const params = this.ossClient._objectRequestParams('GET', `${name}@infoexif`, options); - params.successStatuses = [ 200 ]; - - let result = await this.ossClient.request(params); - result = await this._parseResponse(result); - return { - res: result.res, - data: result.data, - }; - }; - - ImageClient.prototype.putStyle = async function putStyle(styleName, style, options) { - const params = this.ossClient._objectRequestParams('PUT', `/?style&styleName=${styleName}`, options); - params.successStatuses = [ 200 ]; - params.content = `${'\n' + - '`; - - let result 
= await this.ossClient.request(params); - result = await this._parseResponse(result); - return { - res: result.res, - data: result.data, - }; - }; - - ImageClient.prototype.getStyle = async function getStyle(styleName, options) { - const params = this.ossClient._objectRequestParams('GET', `/?style&styleName=${styleName}`, options); - params.successStatuses = [ 200 ]; - - let result = await this.ossClient.request(params); - result = await this._parseResponse(result); - return { - res: result.res, - data: result.data, - }; - }; - - ImageClient.prototype.listStyle = async function listStyle(options) { - const params = this.ossClient._objectRequestParams('GET', '/?style', options); - params.successStatuses = [ 200 ]; - - let result = await this.ossClient.request(params); - result = await this._parseResponse(result); - return { - res: result.res, - data: result.data.Style, - }; - }; - - ImageClient.prototype.deleteStyle = async function deleteStyle(styleName, options) { - const params = this.ossClient._objectRequestParams('DELETE', `/?style&styleName=${styleName}`, options); - params.successStatuses = [ 204 ]; - - const result = await this.ossClient.request(params); - return { - res: result.res, - }; - }; - - ImageClient.prototype.signatureUrl = function signatureUrl(name) { - return this.ossClient.signatureUrl(name); - }; - - ImageClient.prototype._parseResponse = async function _parseResponse(result) { - const str = result.data.toString(); - const type = result.res.headers['content-type']; - - if (type === 'application/json') { - const data = JSON.parse(str); - result.data = {}; - if (data) { - Object.keys(data).forEach(key => { - result.data[key] = parseFloat(data[key].value, 10) || data[key].value; - }); - } - } else if (type === 'application/xml') { - result.data = await this.ossClient.parseXML(str); - } - return result; - }; - - return ImageClient; -}; diff --git a/lib/index.js b/lib/index.js deleted file mode 100644 index 23dc982fd..000000000 --- a/lib/index.js 
+++ /dev/null @@ -1,4 +0,0 @@ -const Client = require('./client'); - -module.exports = Client; -module.exports.Client = Client; diff --git a/lib/managed-upload.js b/lib/managed-upload.js deleted file mode 100644 index 4d4ddfe4f..000000000 --- a/lib/managed-upload.js +++ /dev/null @@ -1,367 +0,0 @@ -const fs = require('fs'); -const { stat: statFile } = require('fs/promises'); -const is = require('is-type-of'); -const util = require('util'); -const path = require('path'); -const mime = require('mime'); -const { retry } = require('./common/utils/retry'); - -const proto = exports; - -/** - * Multipart operations - */ - -/** - * Upload a file to OSS using multipart uploads - * @param {String} name name - * @param {String|File|Buffer} file file - * @param {Object} options - * {Object} options.callback The callback parameter is composed of a JSON string encoded in Base64 - * {String} options.callback.url the OSS sends a callback request to this URL - * {String} options.callback.host The host header value for initiating callback requests - * {String} options.callback.body The value of the request body when a callback is initiated - * {String} options.callback.contentType The Content-Type of the callback requests initiatiated - * {Object} options.callback.customValue Custom parameters are a map of key-values, e.g: - * customValue = { - * key1: 'value1', - * key2: 'value2' - * } - */ -proto.multipartUpload = async function multipartUpload(name, file, options) { - this.resetCancelFlag(); - options = options || {}; - if (options.checkpoint && options.checkpoint.uploadId) { - return await this._resumeMultipart(options.checkpoint, options); - } - - const minPartSize = 100 * 1024; - if (!options.mime) { - if (Buffer.isBuffer(file)) { - options.mime = ''; - } else { - options.mime = mime.getType(path.extname(file)); - } - } - options.headers = options.headers || {}; - this._convertMetaToHeaders(options.meta, options.headers); - - const fileSize = await this._getFileSize(file); - 
if (fileSize < minPartSize) { - options.contentLength = fileSize; - const result = await this.put(name, file, options); - if (options && options.progress) { - await options.progress(1); - } - - const ret = { - res: result.res, - bucket: this.options.bucket, - name, - etag: result.res.headers.etag, - }; - - if ((options.headers && options.headers['x-oss-callback']) || options.callback) { - ret.data = result.data; - } - - return ret; - } - - if (options.partSize && !(parseInt(options.partSize, 10) === options.partSize)) { - throw new Error('partSize must be int number'); - } - - if (options.partSize && options.partSize < minPartSize) { - throw new Error(`partSize must not be smaller than ${minPartSize}`); - } - - const initResult = await this.initMultipartUpload(name, options); - const { uploadId } = initResult; - const partSize = this._getPartSize(fileSize, options.partSize); - - const checkpoint = { - file, - name, - fileSize, - partSize, - uploadId, - doneParts: [], - }; - - if (options && options.progress) { - await options.progress(0, checkpoint, initResult.res); - } - - return await this._resumeMultipart(checkpoint, options); -}; - -/** - * Resume multipart upload from checkpoint. The checkpoint will be - * updated after each successful part upload. 
- * @param {Object} checkpoint the checkpoint - * @param {Object} options options - */ -proto._resumeMultipart = async function _resumeMultipart(checkpoint, options) { - const that = this; - if (this.isCancel()) { - throw this._makeCancelEvent(); - } - const { file, fileSize, partSize, uploadId, doneParts, name } = checkpoint; - - const partOffs = this._divideParts(fileSize, partSize); - const numParts = partOffs.length; - let uploadPartJob = retry( - (self, partNo) => { - // eslint-disable-next-line no-async-promise-executor - return new Promise(async (resolve, reject) => { - try { - if (!self.isCancel()) { - const pi = partOffs[partNo - 1]; - const stream = await self._createStream(file, pi.start, pi.end); - const data = { - stream, - size: pi.end - pi.start, - }; - - if (Array.isArray(self.multipartUploadStreams)) { - self.multipartUploadStreams.push(data.stream); - } else { - self.multipartUploadStreams = [ data.stream ]; - } - - const removeStreamFromMultipartUploadStreams = function() { - if (!stream.destroyed) { - stream.destroy(); - } - const index = self.multipartUploadStreams.indexOf(stream); - if (index !== -1) { - self.multipartUploadStreams.splice(index, 1); - } - }; - - stream.on('close', removeStreamFromMultipartUploadStreams); - stream.on('error', removeStreamFromMultipartUploadStreams); - - let result; - try { - result = await self._uploadPart(name, uploadId, partNo, data, { - timeout: options.timeout, - }); - } catch (error) { - removeStreamFromMultipartUploadStreams(); - if (error.status === 404) { - throw self._makeAbortEvent(); - } - throw error; - } - if (!self.isCancel()) { - doneParts.push({ - number: partNo, - etag: result.res.headers.etag, - }); - checkpoint.doneParts = doneParts; - - if (options.progress) { - await options.progress(doneParts.length / (numParts + 1), checkpoint, result.res); - } - } - } - resolve(); - } catch (err) { - err.partNum = partNo; - reject(err); - } - }); - }, - this.options.retryMax, - { - errorHandler: err => { 
- const _errHandle = _err => { - const statusErr = [ -1, -2 ].includes(_err.status); - const requestErrorRetryHandle = this.options.requestErrorRetryHandle || (() => true); - return statusErr && requestErrorRetryHandle(_err); - }; - return !!_errHandle(err); - }, - } - ); - - const all = Array.from(new Array(numParts), (x, i) => i + 1); - const done = doneParts.map(p => p.number); - const todo = all.filter(p => done.indexOf(p) < 0); - - const defaultParallel = 5; - const parallel = options.parallel || defaultParallel; - - // upload in parallel - const jobErr = await this._parallel( - todo, - parallel, - value => new Promise((resolve, reject) => { - uploadPartJob(that, value) - .then(() => { - resolve(); - }) - .catch(reject); - }) - ); - - const abortEvent = jobErr.find(err => err.name === 'abort'); - if (abortEvent) throw abortEvent; - - if (this.isCancel()) { - uploadPartJob = null; - throw this._makeCancelEvent(); - } - - if (jobErr && jobErr.length > 0) { - jobErr[0].message = `Failed to upload some parts with error: ${jobErr[0].toString()} part_num: ${ - jobErr[0].partNum - }`; - throw jobErr[0]; - } - - return await this.completeMultipartUpload(name, uploadId, doneParts, options); -}; - -/** - * Get file size - * @param {Object} file file - */ -proto._getFileSize = async function _getFileSize(file) { - if (Buffer.isBuffer(file)) { - return file.length; - } else if (is.string(file)) { - const stat = await statFile(file); - return stat.size; - } - - throw new Error('_getFileSize requires Buffer/File/String.'); -}; - -/* - * Readable stream for Web File - */ -const { Readable } = require('stream'); - -function WebFileReadStream(file, options) { - if (!(this instanceof WebFileReadStream)) { - return new WebFileReadStream(file, options); - } - - Readable.call(this, options); - - this.file = file; - // this.reader = new FileReader(); - this.start = 0; - this.finish = false; - this.fileBuffer = null; -} -util.inherits(WebFileReadStream, Readable); - 
-WebFileReadStream.prototype.readFileAndPush = function readFileAndPush(size) { - if (this.fileBuffer) { - let pushRet = true; - while (pushRet && this.fileBuffer && this.start < this.fileBuffer.length) { - const { start } = this; - let end = start + size; - end = end > this.fileBuffer.length ? this.fileBuffer.length : end; - this.start = end; - pushRet = this.push(this.fileBuffer.slice(start, end)); - } - } -}; - -WebFileReadStream.prototype._read = function _read(size) { - if ( - (this.file && this.start >= this.file.size) || - (this.fileBuffer && this.start >= this.fileBuffer.length) || - this.finish || - (this.start === 0 && !this.file) - ) { - if (!this.finish) { - this.fileBuffer = null; - this.finish = true; - } - this.push(null); - return; - } - - const defaultReadSize = 16 * 1024; - size = size || defaultReadSize; - - const that = this; - this.reader.onload = function(e) { - that.fileBuffer = Buffer.from(new Uint8Array(e.target.result)); - that.file = null; - that.readFileAndPush(size); - }; - this.reader.onerror = function onload(e) { - const error = e.srcElement && e.srcElement.error; - if (error) { - throw error; - } - throw e; - }; - - if (this.start === 0) { - this.reader.readAsArrayBuffer(this.file); - } else { - this.readFileAndPush(size); - } -}; - -proto._createStream = function _createStream(file, start, end) { - if (is.readableStream(file)) { - return file; - } else if (Buffer.isBuffer(file)) { - const iterable = file.subarray(start, end); - // we can't use Readable.from() since it is only support in Node v10 - return new Readable({ - read() { - this.push(iterable); - this.push(null); - }, - }); - } else if (is.string(file)) { - return fs.createReadStream(file, { - start, - end: end - 1, - }); - } - throw new Error('_createStream requires Buffer/File/String.'); -}; - -proto._getPartSize = function _getPartSize(fileSize, partSize) { - const maxNumParts = 10 * 1000; - const defaultPartSize = 1 * 1024 * 1024; - - if (!partSize) partSize = 
defaultPartSize; - const safeSize = Math.ceil(fileSize / maxNumParts); - - if (partSize < safeSize) { - partSize = safeSize; - console.warn( - `partSize has been set to ${partSize}, because the partSize you provided causes partNumber to be greater than 10,000` - ); - } - return partSize; -}; - -proto._divideParts = function _divideParts(fileSize, partSize) { - const numParts = Math.ceil(fileSize / partSize); - - const partOffs = []; - for (let i = 0; i < numParts; i++) { - const start = partSize * i; - const end = Math.min(start + partSize, fileSize); - - partOffs.push({ - start, - end, - }); - } - - return partOffs; -}; diff --git a/lib/object.js b/lib/object.js deleted file mode 100644 index 6a230ac85..000000000 --- a/lib/object.js +++ /dev/null @@ -1,369 +0,0 @@ -const fs = require('fs'); -const { stat } = require('fs/promises'); -const is = require('is-type-of'); -const copy = require('copy-to'); -const path = require('path'); -const mime = require('mime'); -const callback = require('./common/callback'); -const { retry } = require('./common/utils/retry'); -const { obj2xml } = require('./common/utils/obj2xml'); - -const proto = exports; - -/** - * Object operations - */ - -/** - * append an object from String(file path)/Buffer/ReadableStream - * @param {String} name the object key - * @param {Mixed} file String(file path)/Buffer/ReadableStream - * @param {Object} options options - * @return {Object} result - */ -proto.append = async function append(name, file, options) { - options = options || {}; - if (options.position === undefined) options.position = '0'; - options.subres = { - append: '', - position: options.position, - }; - options.method = 'POST'; - - const result = await this.put(name, file, options); - result.nextAppendPosition = result.res.headers['x-oss-next-append-position']; - return result; -}; - -/** - * put an object from String(file path)/Buffer/ReadableStream - * @param {String} name the object key - * @param {Mixed} file String(file 
path)/Buffer/ReadableStream - * @param {Object} options options - * {Object} options.callback The callback parameter is composed of a JSON string encoded in Base64 - * {String} options.callback.url the OSS sends a callback request to this URL - * {String} options.callback.host The host header value for initiating callback requests - * {String} options.callback.body The value of the request body when a callback is initiated - * {String} options.callback.contentType The Content-Type of the callback requests initiatiated - * {Object} options.callback.customValue Custom parameters are a map of key-values, e.g: - * customValue = { - * key1: 'value1', - * key2: 'value2' - * } - * @return {Object} result - */ -proto.put = async function put(name, file, options) { - let content; - options = options || {}; - name = this._objectName(name); - - if (Buffer.isBuffer(file)) { - content = file; - } else if (is.string(file)) { - const stats = await stat(file); - if (!stats.isFile()) { - throw new Error(`${file} is not file`); - } - options.mime = options.mime || mime.getType(path.extname(file)); - options.contentLength = stats.size; - const getStream = () => fs.createReadStream(file); - const putStreamStb = (objectName, makeStream, configOption) => { - return this.putStream(objectName, makeStream(), configOption); - }; - return await retry(putStreamStb, this.options.retryMax, { - errorHandler: err => { - const _errHandle = _err => { - const statusErr = [ -1, -2 ].includes(_err.status); - const requestErrorRetryHandle = this.options.requestErrorRetryHandle || (() => true); - return statusErr && requestErrorRetryHandle(_err); - }; - if (_errHandle(err)) return true; - return false; - }, - })(name, getStream, options); - } else if (is.readableStream(file)) { - return await this.putStream(name, file, options); - } else { - throw new TypeError('Must provide String/Buffer/ReadableStream for put.'); - } - - options.headers = options.headers || {}; - 
this._convertMetaToHeaders(options.meta, options.headers); - - const method = options.method || 'PUT'; - const params = this._objectRequestParams(method, name, options); - - callback.encodeCallback(params, options); - - params.mime = options.mime; - params.content = content; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - const ret = { - name, - url: this._objectUrl(name), - res: result.res, - }; - - if (params.headers && params.headers['x-oss-callback']) { - ret.data = JSON.parse(result.data.toString()); - } - - return ret; -}; - -/** - * put an object from ReadableStream. - * @param {String} name the object key - * @param {Readable} stream the ReadableStream - * @param {Object} options options - * @return {Object} result - */ -proto.putStream = async function putStream(name, stream, options) { - options = options || {}; - options.headers = options.headers || {}; - name = this._objectName(name); - this._convertMetaToHeaders(options.meta, options.headers); - - const method = options.method || 'PUT'; - const params = this._objectRequestParams(method, name, options); - callback.encodeCallback(params, options); - params.mime = options.mime; - params.stream = stream; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - const ret = { - name, - url: this._objectUrl(name), - res: result.res, - }; - - if (params.headers && params.headers['x-oss-callback']) { - ret.data = JSON.parse(result.data.toString()); - } - - return ret; -}; - -proto.getStream = async function getStream(name, options) { - options = options || {}; - - if (options.process) { - options.subres = options.subres || {}; - options.subres['x-oss-process'] = options.process; - } - - const params = this._objectRequestParams('GET', name, options); - params.customResponse = true; - params.successStatuses = [ 200, 206, 304 ]; - - const result = await this.request(params); - - return { - stream: result.res, - res: { - status: result.status, - 
headers: result.headers, - }, - }; -}; - -proto.putMeta = async function putMeta(name, meta, options) { - return await this.copy(name, name, { - meta: meta || {}, - timeout: options && options.timeout, - ctx: options && options.ctx, - }); -}; - -proto.list = async function list(query, options) { - // prefix, marker, max-keys, delimiter - - const params = this._objectRequestParams('GET', '', options); - params.query = query; - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - let objects = result.data.Contents || []; - const that = this; - if (objects) { - if (!Array.isArray(objects)) { - objects = [ objects ]; - } - objects = objects.map(obj => ({ - name: obj.Key, - url: that._objectUrl(obj.Key), - lastModified: obj.LastModified, - etag: obj.ETag, - type: obj.Type, - size: Number(obj.Size), - storageClass: obj.StorageClass, - owner: { - id: obj.Owner.ID, - displayName: obj.Owner.DisplayName, - }, - })); - } - let prefixes = result.data.CommonPrefixes || null; - if (prefixes) { - if (!Array.isArray(prefixes)) { - prefixes = [ prefixes ]; - } - prefixes = prefixes.map(item => item.Prefix); - } - return { - res: result.res, - objects, - prefixes, - nextMarker: result.data.NextMarker || null, - isTruncated: result.data.IsTruncated === 'true', - }; -}; - -proto.listV2 = async function listV2(query = {}, options = {}) { - const continuation_token = query['continuation-token'] || query.continuationToken; - delete query['continuation-token']; - delete query.continuationToken; - if (continuation_token) { - options.subres = Object.assign( - { - 'continuation-token': continuation_token, - }, - options.subres - ); - } - const params = this._objectRequestParams('GET', '', options); - params.query = Object.assign({ 'list-type': 2 }, query); - delete params.query['continuation-token']; - delete query.continuationToken; - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await 
this.request(params); - let objects = result.data.Contents || []; - const that = this; - if (objects) { - if (!Array.isArray(objects)) { - objects = [ objects ]; - } - objects = objects.map(obj => ({ - name: obj.Key, - url: that._objectUrl(obj.Key), - lastModified: obj.LastModified, - etag: obj.ETag, - type: obj.Type, - size: Number(obj.Size), - storageClass: obj.StorageClass, - owner: obj.Owner - ? { - id: obj.Owner.ID, - displayName: obj.Owner.DisplayName, - } - : null, - })); - } - let prefixes = result.data.CommonPrefixes || null; - if (prefixes) { - if (!Array.isArray(prefixes)) { - prefixes = [ prefixes ]; - } - prefixes = prefixes.map(item => item.Prefix); - } - return { - res: result.res, - objects, - prefixes, - isTruncated: result.data.IsTruncated === 'true', - keyCount: +result.data.KeyCount, - continuationToken: result.data.ContinuationToken || null, - nextContinuationToken: result.data.NextContinuationToken || null, - }; -}; - -/** - * Restore Object - * @param {String} name the object key - * @param {Object} options {type : Archive or ColdArchive} - * @return {{res}} result - */ -proto.restore = async function restore(name, options = { type: 'Archive' }) { - options = options || {}; - options.subres = Object.assign({ restore: '' }, options.subres); - if (options.versionId) { - options.subres.versionId = options.versionId; - } - const params = this._objectRequestParams('POST', name, options); - if (options.type === 'ColdArchive') { - const paramsXMLObj = { - RestoreRequest: { - Days: options.Days ? options.Days : 2, - JobParameters: { - Tier: options.JobParameters ? 
options.JobParameters : 'Standard', - }, - }, - }; - params.content = obj2xml(paramsXMLObj, { - headers: true, - }); - params.mime = 'xml'; - } - params.successStatuses = [ 202 ]; - - const result = await this.request(params); - - return { - res: result.res, - }; -}; - -proto._objectUrl = function _objectUrl(name) { - return this._getReqUrl({ bucket: this.options.bucket, object: name }); -}; - -/** - * generator request params - * @return {Object} params - * - * @private - */ - -proto._objectRequestParams = function(method, name, options) { - if (!this.options.bucket && !this.options.cname) { - throw new Error('Please create a bucket first'); - } - - options = options || {}; - name = this._objectName(name); - const params = { - object: name, - bucket: this.options.bucket, - method, - subres: options && options.subres, - timeout: options && options.timeout, - ctx: options && options.ctx, - }; - - if (options.headers) { - params.headers = {}; - copy(options.headers).to(params.headers); - } - return params; -}; - -proto._objectName = function(name) { - return name.replace(/^\/+/, ''); -}; - -proto._convertMetaToHeaders = function(meta, headers) { - if (!meta) { - return; - } - - Object.keys(meta).forEach(k => { - headers[`x-oss-meta-${k}`] = meta[k]; - }); -}; diff --git a/lib/rtmp.js b/lib/rtmp.js deleted file mode 100644 index 53c6eed2d..000000000 --- a/lib/rtmp.js +++ /dev/null @@ -1,281 +0,0 @@ -const jstoxml = require('jstoxml'); -const utility = require('utility'); -const copy = require('copy-to'); -const urlutil = require('url'); - -const proto = exports; - -/** - * RTMP operations - */ - -/** - * Create a live channel - * @param {String} id the channel id - * @param {Object} conf the channel configuration - * @param {Object} options options - * @return {Object} result - */ -proto.putChannel = async function putChannel(id, conf, options) { - options = options || {}; - options.subres = 'live'; - - const params = this._objectRequestParams('PUT', id, options); - 
params.xmlResponse = true; - params.content = jstoxml.toXML({ - LiveChannelConfiguration: conf, - }); - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - let publishUrls = result.data.PublishUrls.Url; - if (!Array.isArray(publishUrls)) { - publishUrls = [ publishUrls ]; - } - let playUrls = result.data.PlayUrls.Url; - if (!Array.isArray(playUrls)) { - playUrls = [ playUrls ]; - } - - return { - publishUrls, - playUrls, - res: result.res, - }; -}; - -/** - * Get the channel info - * @param {String} id the channel id - * @param {Object} options options - * @return {Object} result - */ -proto.getChannel = async function getChannel(id, options) { - options = options || {}; - options.subres = 'live'; - - const params = this._objectRequestParams('GET', id, options); - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - data: result.data, - res: result.res, - }; -}; - -/** - * Delete the channel - * @param {String} id the channel id - * @param {Object} options options - * @return {Object} result - */ -proto.deleteChannel = async function deleteChannel(id, options) { - options = options || {}; - options.subres = 'live'; - - const params = this._objectRequestParams('DELETE', id, options); - params.successStatuses = [ 204 ]; - - const result = await this.request(params); - - return { - res: result.res, - }; -}; - -/** - * Set the channel status - * @param {String} id the channel id - * @param {String} status the channel status - * @param {Object} options options - * @return {Object} result - */ -proto.putChannelStatus = async function putChannelStatus(id, status, options) { - options = options || {}; - options.subres = { - live: null, - status, - }; - - const params = this._objectRequestParams('PUT', id, options); - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - res: result.res, - }; -}; - -/** - * Get the channel status 
- * @param {String} id the channel id - * @param {Object} options options - * @return {Object} result - */ -proto.getChannelStatus = async function getChannelStatus(id, options) { - options = options || {}; - options.subres = { - live: null, - comp: 'stat', - }; - - const params = this._objectRequestParams('GET', id, options); - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - data: result.data, - res: result.res, - }; -}; - -/** - * List the channels - * @param {Object} query the query parameters - * filter options: - * - prefix {String}: the channel id prefix (returns channels with this prefix) - * - marker {String}: the channle id marker (returns channels after this id) - * - max-keys {Number}: max number of channels to return - * @param {Object} options options - * @return {Object} result - */ -proto.listChannels = async function listChannels(query, options) { - // prefix, marker, max-keys - - options = options || {}; - options.subres = 'live'; - - const params = this._objectRequestParams('GET', '', options); - params.query = query; - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - let channels = result.data.LiveChannel || []; - if (!Array.isArray(channels)) { - channels = [ channels ]; - } - - channels = channels.map(x => { - x.PublishUrls = x.PublishUrls.Url; - if (!Array.isArray(x.PublishUrls)) { - x.PublishUrls = [ x.PublishUrls ]; - } - x.PlayUrls = x.PlayUrls.Url; - if (!Array.isArray(x.PlayUrls)) { - x.PlayUrls = [ x.PlayUrls ]; - } - - return x; - }); - - return { - channels, - nextMarker: result.data.NextMarker || null, - isTruncated: result.data.IsTruncated === 'true', - res: result.res, - }; -}; - -/** - * Get the channel history - * @param {String} id the channel id - * @param {Object} options options - * @return {Object} result - */ -proto.getChannelHistory = async function getChannelHistory(id, options) { - 
options = options || {}; - options.subres = { - live: null, - comp: 'history', - }; - - const params = this._objectRequestParams('GET', id, options); - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - let records = result.data.LiveRecord || []; - if (!Array.isArray(records)) { - records = [ records ]; - } - return { - records, - res: result.res, - }; -}; - -/** - * Create vod playlist - * @param {String} id the channel id - * @param {String} name the playlist name - * @param {Object} time the begin and end time - * time: - * - startTime {Number}: the begin time in epoch seconds - * - endTime {Number}: the end time in epoch seconds - * @param {Object} options options - * @return {Object} result - */ -proto.createVod = async function createVod(id, name, time, options) { - options = options || {}; - options.subres = { - vod: null, - }; - copy(time).to(options.subres); - - const params = this._objectRequestParams('POST', `${id}/${name}`, options); - params.query = time; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - res: result.res, - }; -}; - -/** - * Get RTMP Url - * @param {String} channelId the channel id - * @param {Object} options options - * options: - * - expires {Number}: expire time in seconds - * - params {Object}: the parameters such as 'playlistName' - * @return {String} the RTMP url - */ -proto.getRtmpUrl = function(channelId, options) { - options = options || {}; - const expires = utility.timestamp() + (options.expires || 1800); - const res = { - bucket: this.options.bucket, - object: this._objectName(`live/${channelId}`), - }; - const resource = `/${res.bucket}/${channelId}`; - - options.params = options.params || {}; - const query = Object.keys(options.params).sort().map(x => `${x}:${options.params[x]}\n`) - .join(''); - - const stringToSign = `${expires}\n${query}${resource}`; - const signature = this.signature(stringToSign); - - const 
url = urlutil.parse(this._getReqUrl(res)); - url.protocol = 'rtmp:'; - url.query = { - OSSAccessKeyId: this.options.accessKeyId, - Expires: expires, - Signature: signature, - }; - copy(options.params).to(url.query); - - return url.format(); -}; diff --git a/lib/sts.js b/lib/sts.js deleted file mode 100644 index 7ffe480e7..000000000 --- a/lib/sts.js +++ /dev/null @@ -1,161 +0,0 @@ -const debug = require('util').debuglog('oss-client:sts'); -const crypto = require('crypto'); -const querystring = require('querystring'); -const copy = require('copy-to'); -const is = require('is-type-of'); -const ms = require('humanize-ms'); -const urllib = require('urllib'); - -function STS(options) { - if (!(this instanceof STS)) { - return new STS(options); - } - - if (!options - || !options.accessKeyId - || !options.accessKeySecret) { - throw new Error('require accessKeyId, accessKeySecret'); - } - - this.options = { - endpoint: options.endpoint || 'https://sts.aliyuncs.com', - format: 'JSON', - apiVersion: '2015-04-01', - sigMethod: 'HMAC-SHA1', - sigVersion: '1.0', - timeout: '60s', - }; - copy(options).to(this.options); - - // support custom agent and urllib client - if (this.options.urllib) { - this.urllib = this.options.urllib; - } else { - this.urllib = urllib; - } -} - -module.exports = STS; - -const proto = STS.prototype; - -/** - * STS opertaions - */ - -proto.assumeRole = async function assumeRole(role, policy, expiration, session, options) { - const opts = this.options; - const params = { - Action: 'AssumeRole', - RoleArn: role, - RoleSessionName: session || 'app', - DurationSeconds: expiration || 3600, - - Format: opts.format, - Version: opts.apiVersion, - AccessKeyId: opts.accessKeyId, - SignatureMethod: opts.sigMethod, - SignatureVersion: opts.sigVersion, - SignatureNonce: Math.random(), - Timestamp: new Date().toISOString(), - }; - - if (policy) { - let policyStr; - if (is.string(policy)) { - try { - policyStr = JSON.stringify(JSON.parse(policy)); - } catch (err) { - 
throw new Error(`Policy string is not a valid JSON: ${err.message}`); - } - } else { - policyStr = JSON.stringify(policy); - } - params.Policy = policyStr; - } - - const signature = this._getSignature('POST', params, opts.accessKeySecret); - params.Signature = signature; - - const reqUrl = opts.endpoint; - const reqParams = { - agent: this.agent, - timeout: ms((options && options.timeout) || opts.timeout), - method: 'POST', - content: querystring.stringify(params), - headers: { - 'Content-Type': 'application/x-www-form-urlencoded', - }, - ctx: options && options.ctx, - }; - - // try ctx.httpclient first - const urllib = reqParams.ctx?.httpclient ?? reqParams.ctx?.urllib ?? this.urllib; - const result = await urllib.request(reqUrl, reqParams); - debug( - 'response %s %s, got %s, headers: %j', - reqParams.method, reqUrl, result.status, result.headers - ); - - if (Math.floor(result.status / 100) !== 2) { - const err = await this._requestError(result); - err.params = reqParams; - throw err; - } - result.data = JSON.parse(result.data); - - return { - res: result.res, - credentials: result.data.Credentials, - }; -}; - -proto._requestError = async function _requestError(result) { - const err = new Error(); - err.status = result.status; - - try { - const resp = await JSON.parse(result.data) || {}; - err.code = resp.Code; - err.message = `${resp.Code}: ${resp.Message}`; - err.requestId = resp.RequestId; - } catch (e) { - err.message = `UnknownError: ${String(result.data)}`; - } - - return err; -}; - -proto._getSignature = function _getSignature(method, params, key) { - const that = this; - const canoQuery = Object.keys(params).sort().map(k => `${that._escape(k)}=${that._escape(params[k])}`) - .join('&'); - - const stringToSign = - `${method.toUpperCase() - }&${this._escape('/') - }&${this._escape(canoQuery)}`; - - debug('string to sign: %s', stringToSign); - - let signature = crypto.createHmac('sha1', `${key}&`); - signature = 
signature.update(stringToSign).digest('base64'); - - debug('signature: %s', signature); - - return signature; -}; - -/** - * Since `encodeURIComponent` doesn't encode '*', which causes - * 'SignatureDoesNotMatch'. We need do it ourselves. - * @param {String} str - input string - */ -proto._escape = function _escape(str) { - return encodeURIComponent(str) - .replace(/!/g, '%21') - .replace(/'/g, '%27') - .replace(/\(/g, '%28') - .replace(/\)/g, '%29') - .replace(/\*/g, '%2A'); -}; diff --git a/package.json b/package.json index 57a8f27b4..078073928 100644 --- a/package.json +++ b/package.json @@ -2,19 +2,39 @@ "name": "oss-client", "version": "1.2.6", "description": "Aliyun OSS(Object Storage Service) Node.js Client", - "main": "lib/index.js", - "types": "index.d.ts", + "typings": "./dist/esm/index.d.ts", "files": [ - "lib", - "index.d.ts" + "dist", + "src" ], + "type": "module", + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, "scripts": { - "lint": "eslint lib test", - "test": "egg-bin test --parallel --ts false", - "test-local": "egg-bin test --ts false", - "tsd": "tsd", - "cov": "egg-bin cov --parallel --ts false", - "ci": "npm run lint && npm run tsd && npm run cov" + "lint": "eslint src test --ext .ts", + "test": "egg-bin test", + "test-local": "egg-bin test", + "cov": "egg-bin cov", + "ci": "npm run lint && npm run cov && npm run prepublishOnly", + "contributor": "git-contributor", + "prepublishOnly": "tshy && tshy-after" }, "repository": { "type": "git", @@ -31,34 +51,33 @@ "url": "https://github.com/node-modules/oss-client/issues" }, "engines": { - "node": ">= 14.18.0" + "node": ">= 16.0.0" }, "homepage": 
"https://github.com/node-modules/oss-client", "dependencies": { - "address": "^1.2.0", - "copy-to": "^2.0.1", - "dateformat": "^2.0.0", - "humanize-ms": "^1.2.0", - "is-type-of": "^1.0.0", - "jstoxml": "^2.0.0", - "merge-descriptors": "^1.0.1", - "mime": "^2.4.5", - "oss-interface": "^1.0.1", - "sdk-base": "^3.6.0", - "stream-wormhole": "^1.0.4", - "urllib": "^3.18.1", - "utility": "^1.8.0", - "xml2js": "^0.4.16" + "is-type-of": "^2.0.1", + "mime": "^3.0.0", + "ms": "^2.1.3", + "oss-interface": "^1.3.0", + "stream-wormhole": "^2.0.0", + "urllib": "^3.19.2", + "utility": "^1.18.0", + "xml2js": "^0.6.2" }, "devDependencies": { "@eggjs/tsconfig": "^1.1.0", + "@types/mime": "^3.0.1", + "@types/mocha": "^10.0.1", + "@types/ms": "^0.7.31", "@types/node": "^20.3.1", + "@types/xml2js": "^0.4.12", "egg-bin": "^6.4.1", "eslint": "^8.25.0", - "eslint-config-egg": "^12.1.0", - "mm": "^3.2.0", - "sinon": "^1.17.7", - "tsd": "^0.28.1", - "typescript": "^5.1.3" - } + "eslint-config-egg": "^13.0.0", + "git-contributor": "^2.1.5", + "tshy": "^1.0.0", + "tshy-after": "^1.0.0", + "typescript": "^5.2.2" + }, + "types": "./dist/commonjs/index.d.ts" } diff --git a/src/OSSBaseClient.ts b/src/OSSBaseClient.ts new file mode 100644 index 000000000..aadcaecc6 --- /dev/null +++ b/src/OSSBaseClient.ts @@ -0,0 +1,361 @@ +import { debuglog } from 'node:util'; +import assert from 'node:assert'; +import { createHash } from 'node:crypto'; +import { extname } from 'node:path'; +import { sendToWormhole } from 'stream-wormhole'; +import { parseStringPromise } from 'xml2js'; +import utility from 'utility'; +import mime from 'mime'; +import { + HttpClient, RequestOptions, HttpClientResponse, IncomingHttpHeaders, +} from 'urllib'; +import ms from 'ms'; +import { authorization, buildCanonicalString, computeSignature } from './util/index.js'; +import { OSSRequestParams, OSSResult, RequestParameters } from './type/Request.js'; +import { OSSClientError } from './error/index.js'; + +const debug = 
debuglog('oss-client:client'); + +export interface OSSBaseClientInitOptions { + /** access key you create */ + accessKeyId: string; + /** access secret you create */ + accessKeySecret: string; + /** + * oss region domain. It takes priority over region. + * e.g.: + * - oss-cn-shanghai.aliyuncs.com + * - oss-cn-shanghai-internal.aliyuncs.com + */ + endpoint: string; + /** the bucket data region location, please see Data Regions, default is oss-cn-hangzhou. */ + region?: string | undefined; + /** access OSS with aliyun internal network or not, default is false. If your servers are running on aliyun too, you can set true to save lot of money. */ + internal?: boolean | undefined; + /** instance level timeout for all operations, default is 60s */ + timeout?: number | string; + isRequestPay?: boolean; +} + +export type OSSBaseClientOptions = Required & { + timeout: number; +}; + +export abstract class OSSBaseClient { + readonly #httpClient = new HttpClient(); + readonly #userAgent: string; + protected readonly options: OSSBaseClientOptions; + + constructor(options: OSSBaseClientInitOptions) { + this.options = this.#initOptions(options); + this.#userAgent = this.#getUserAgent(); + } + + /** public methods */ + + /** + * get OSS signature + */ + signature(stringToSign: string) { + debug('authorization stringToSign: %s', stringToSign); + return computeSignature(this.options.accessKeySecret, stringToSign); + } + + /** protected methods */ + + /** + * get author header + * + * "Authorization: OSS " + Access Key Id + ":" + Signature + * + * Signature = base64(hmac-sha1(Access Key Secret + "\n" + * + VERB + "\n" + * + CONTENT-MD5 + "\n" + * + CONTENT-TYPE + "\n" + * + DATE + "\n" + * + CanonicalizedOSSHeaders + * + CanonicalizedResource)) + */ + protected authorization(method: string, resource: string, headers: IncomingHttpHeaders, subResource?: RequestParameters) { + const stringToSign = buildCanonicalString(method.toUpperCase(), resource, { + headers, + parameters: 
subResource, + }); + debug('stringToSign: %o', stringToSign); + const auth = authorization(this.options.accessKeyId, this.options.accessKeySecret, stringToSign); + debug('authorization: %o', auth); + return auth; + } + + /** + * encodeURIComponent name except '/' + */ + protected escape(name: string) { + return utility.encodeURIComponent(name).replaceAll('%2F', '/'); + } + + protected abstract getRequestEndpoint(): string; + + protected getRequestURL(params: Pick) { + let resourcePath = '/'; + if (params.object) { + // Preserve '/' in result url + resourcePath += this.escape(params.object).replaceAll('+', '%2B'); + } + const urlObject = new URL(this.getRequestEndpoint()); + urlObject.pathname = resourcePath; + if (params.query) { + const query = params.query as Record; + for (const key in query) { + const value = query[key]; + urlObject.searchParams.set(key, `${value}`); + } + } + if (params.subResource) { + let subresAsQuery: Record = {}; + if (typeof params.subResource === 'string') { + subresAsQuery[params.subResource] = ''; + } else if (Array.isArray(params.subResource)) { + params.subResource.forEach(k => { + subresAsQuery[k] = ''; + }); + } else { + subresAsQuery = params.subResource; + } + for (const key in subresAsQuery) { + urlObject.searchParams.set(key, `${subresAsQuery[key]}`); + } + } + return urlObject.toString(); + } + + getResource(params: { bucket?: string; object?: string; }) { + let resource = '/'; + if (params.bucket) resource += `${params.bucket}/`; + if (params.object) resource += params.object; + return resource; + } + + createHttpClientRequestParams(params: OSSRequestParams) { + const headers: IncomingHttpHeaders = { + ...params.headers, + // https://help.aliyun.com/zh/oss/developer-reference/include-signatures-in-the-authorization-header + // 此次操作的时间,Date必须为GMT格式,且不能为空。该值取自请求头的Date字段或者x-oss-date字段。当这两个字段同时存在时,以x-oss-date为准。 + // e.g.: Sun, 22 Nov 2015 08:16:38 GMT + 'x-oss-date': new Date().toUTCString(), + 'user-agent': this.#userAgent, + 
}; + if (this.options.isRequestPay) { + headers['x-oss-request-payer'] = 'requester'; + } + if (!headers['content-type']) { + let contentType: string | null = null; + if (params.mime) { + if (params.mime.includes('/')) { + contentType = params.mime; + } else { + contentType = mime.getType(params.mime); + } + } else if (params.object) { + contentType = mime.getType(extname(params.object)); + } + if (contentType) { + headers['content-type'] = contentType; + } + } + if (params.content) { + if (!params.disabledMD5) { + if (!headers['content-md5']) { + headers['content-md5'] = createHash('md5').update(Buffer.from(params.content)).digest('base64'); + } + } + if (!headers['content-length']) { + headers['content-length'] = `${params.content.length}`; + } + } + const authResource = this.getResource(params); + headers.authorization = this.authorization(params.method, authResource, headers, params.subResource); + const url = this.getRequestURL(params); + debug('request %s %s, with headers %j, !!stream: %s', params.method, url, headers, !!params.stream); + const timeout = params.timeout ?? 
this.options.timeout; + const options: RequestOptions = { + method: params.method, + content: params.content, + stream: params.stream, + headers, + timeout, + writeStream: params.writeStream, + timing: true, + }; + if (params.streaming) { + options.dataType = 'stream'; + } + return { url, options }; + } + + /** + * request oss server + */ + protected async request(params: OSSRequestParams): Promise> { + const { url, options } = this.createHttpClientRequestParams(params); + const result = await this.#httpClient.request(url, options); + debug('response %s %s, got %s, headers: %j', params.method, url, result.status, result.headers); + let err; + if (!params.successStatuses?.includes(result.status)) { + err = await this.#createClientException(result); + if (params.streaming && result.res) { + // consume the response stream + await sendToWormhole(result.res); + } + throw err; + } + + let data = result.data as T; + if (params.xmlResponse) { + data = await this.#xml2json(result.data); + } + return { + data, + res: result.res, + } satisfies OSSResult; + } + + + /** private methods */ + + #initOptions(options: OSSBaseClientInitOptions) { + assert(options.accessKeyId && options.accessKeySecret, 'require accessKeyId and accessKeySecret'); + assert(options.endpoint, 'require endpoint'); + let timeout = 60000; + if (options.timeout) { + if (typeof options.timeout === 'string') { + timeout = ms(options.timeout); + } else { + timeout = options.timeout; + } + } + + const initOptions = { + accessKeyId: options.accessKeyId.trim(), + accessKeySecret: options.accessKeySecret.trim(), + endpoint: options.endpoint, + region: options.region ?? 'oss-cn-hangzhou', + internal: options.internal ?? false, + isRequestPay: options.isRequestPay ?? 
false, + timeout, + } satisfies OSSBaseClientOptions; + return initOptions; + } + + /** + * Get User-Agent for Node.js + * @example + * oss-client/2.0.0 Node.js/5.3.0 (darwin; arm64) + */ + #getUserAgent() { + // TODO: should read version from package.json + const sdk = 'oss-client/2.0.0'; + const platform = `Node.js/${process.version.slice(1)} (${process.platform}; ${process.arch})`; + return `${sdk} ${platform}`; + } + + async #xml2json(xml: string | Buffer) { + if (Buffer.isBuffer(xml)) { + xml = xml.toString(); + } + debug('xml2json %o', xml); + return await parseStringPromise(xml, { + explicitRoot: false, + explicitArray: false, + }) as T; + } + + async #createClientException(result: HttpClientResponse) { + let err: OSSClientError; + let requestId = result.headers['x-oss-request-id'] as string ?? ''; + let hostId = ''; + const status = result.status; + if (!result.data || !result.data.length) { + // HEAD not exists resource + if (status === 404) { + err = new OSSClientError(status, 'NoSuchKey', 'Object not exists', requestId, hostId); + } else if (status === 412) { + err = new OSSClientError(status, 'PreconditionFailed', 'Pre condition failed', requestId, hostId); + } else { + err = new OSSClientError(status, 'Unknown', `Unknown error, status=${status}, raw error=${result}`, + requestId, hostId); + } + } else { + const xml = result.data.toString(); + debug('request response error xml: %o', xml); + + let info; + try { + info = await this.#xml2json(xml); + } catch (e: any) { + err = new OSSClientError(status, 'PreconditionFailed', `${e.message} (raw xml=${JSON.stringify(xml)})`, requestId, hostId); + return err; + } + + let message = info?.Message ?? `Unknown request error, status=${result.status}, raw xml=${JSON.stringify(xml)}`; + if (info?.Condition) { + message += ` (condition=${info.Condition})`; + } + if (info?.RequestId) { + requestId = info.RequestId; + } + if (info?.HostId) { + hostId = info.HostId; + } + err = new OSSClientError(status, info?.Code ?? 
'Unknown', message, requestId, hostId); + } + + debug('generate error %o', err); + return err; + } +} + +// /** +// * Object operations +// */ +// merge(proto, require('./common/object')); +// merge(proto, require('./object')); +// merge(proto, require('./common/image')); +// /** +// * Bucket operations +// */ +// merge(proto, require('./common/bucket')); +// merge(proto, require('./bucket')); +// // multipart upload +// merge(proto, require('./managed-upload')); +// /** +// * RTMP operations +// */ +// merge(proto, require('./rtmp')); + +// /** +// * common multipart-copy +// */ +// merge(proto, require('./common/multipart-copy')); +// /** +// * Common module parallel +// */ +// merge(proto, require('./common/parallel')); +// /** +// * Multipart operations +// */ +// merge(proto, require('./common/multipart')); +// /** +// * ImageClient class +// */ +// Client.ImageClient = require('./image')(Client); +// /** +// * Cluster Client class +// */ +// Client.ClusterClient = require('./cluster')(Client); + +// /** +// * STS Client class +// */ +// Client.STS = require('./sts'); + diff --git a/src/OSSObject.ts b/src/OSSObject.ts new file mode 100644 index 000000000..1e3d00cc1 --- /dev/null +++ b/src/OSSObject.ts @@ -0,0 +1,989 @@ +import { Readable, Writable } from 'node:stream'; +import { createReadStream, createWriteStream } from 'node:fs'; +import { strict as assert } from 'node:assert'; +import querystring from 'node:querystring'; +import fs from 'node:fs/promises'; +import mime from 'mime'; +import { isReadable, isWritable } from 'is-type-of'; +import type { IncomingHttpHeaders } from 'urllib'; +import type { + ListObjectsQuery, + RequestOptions, + ListObjectResult, + PutObjectOptions, + PutObjectResult, + UserMeta, + DeleteObjectOptions, + DeleteObjectResult, + GetObjectOptions, + GetObjectResult, + SignatureUrlOptions, + HeadObjectOptions, + HeadObjectResult, + IObjectSimple, + GetStreamOptions, + GetStreamResult, + CopyObjectOptions, + CopyAndPutMetaResult, +} 
from 'oss-interface'; +import { + OSSBaseClientInitOptions, + OSSBaseClient, +} from './OSSBaseClient.js'; +import { + ACLType, + AppendObjectOptions, + AppendObjectResult, + DeleteMultipleObject, + DeleteMultipleObjectOptions, + DeleteMultipleObjectResponse, + DeleteMultipleObjectXML, + DeleteObjectTaggingOptions, + DeleteObjectTaggingResult, + GetACLOptions, + GetACLResult, + GetSymlinkOptions, + GetSymlinkResult, + GutObjectTaggingOptions, + GutObjectTaggingResult, + ListV2ObjectResult, + ListV2ObjectsQuery, + OSSRequestParams, + OSSResult, + PutACLOptions, + PutACLResult, + PutObjectTaggingOptions, + PutObjectTaggingResult, + PutSymlinkOptions, + PutSymlinkResult, + RequestMethod, +} from './type/index.js'; +import { + checkBucketName, signatureForURL, encodeCallback, json2xml, timestamp, + checkObjectTag, computeSignature, policyToJSONString, +} from './util/index.js'; + +export interface OSSObjectClientInitOptions extends OSSBaseClientInitOptions { + bucket: string; +} + +export class OSSObject extends OSSBaseClient implements IObjectSimple { + #bucket: string; + #bucketEndpoint: string; + + constructor(options: OSSObjectClientInitOptions) { + checkBucketName(options.bucket); + super(options); + this.#bucket = options.bucket; + const urlObject = new URL(this.options.endpoint); + urlObject.hostname = `${this.#bucket}.${urlObject.hostname}`; + this.#bucketEndpoint = urlObject.toString(); + } + + /** public methods */ + + /** + * AppendObject + * @see https://help.aliyun.com/zh/oss/developer-reference/appendobject + */ + async append(name: string, file: string | Buffer | Readable, options?: AppendObjectOptions): Promise { + const position = options?.position ?? 
'0'; + const result = await this.#sendPutRequest(name, { + ...options, + subResource: { + append: '', + position: `${position}`, + }, + }, file, 'POST'); + return { + ...result, + nextAppendPosition: result.res.headers['x-oss-next-append-position'] as string, + }; + } + + /** + * put an object from String(file path)/Buffer/Readable + * @param {String} name the object key + * @param {Mixed} file String(file path)/Buffer/Readable + * @param {Object} options options + * {Object} options.callback The callback parameter is composed of a JSON string encoded in Base64 + * {String} options.callback.url the OSS sends a callback request to this URL + * {String} options.callback.host The host header value for initiating callback requests + * {String} options.callback.body The value of the request body when a callback is initiated + * {String} options.callback.contentType The Content-Type of the callback requests initiated + * {Object} options.callback.customValue Custom parameters are a map of key-values, e.g: + * customValue = { + * key1: 'value1', + * key2: 'value2' + * } + * @return {Object} result + */ + async put(name: string, file: string | Buffer | Readable, options?: PutObjectOptions): Promise { + if (typeof file === 'string' || isReadable(file) || Buffer.isBuffer(file)) { + return await this.#sendPutRequest(name, options ?? {}, file); + } + throw new TypeError('Must provide String/Buffer/ReadableStream for put.'); + } + + /** + * put an object from ReadableStream. + */ + async putStream(name: string, stream: Readable, options?: PutObjectOptions): Promise { + return await this.#sendPutRequest(name, options ?? 
{}, stream); + } + + async putMeta(name: string, meta: UserMeta, options?: Omit) { + return await this.copy(name, name, { + meta, + ...options, + }); + } + + /** + * GetBucket (ListObjects) + * @see https://help.aliyun.com/zh/oss/developer-reference/listobjects + */ + async list(query?: ListObjectsQuery, options?: RequestOptions): Promise { + // prefix, marker, max-keys, delimiter + const params = this.#objectRequestParams('GET', '', options); + if (query) { + params.query = query; + } + params.xmlResponse = true; + params.successStatuses = [ 200 ]; + + const { data, res } = await this.request(params); + let objects = data.Contents || []; + if (objects) { + if (!Array.isArray(objects)) { + objects = [ objects ]; + } + objects = objects.map((obj: any) => ({ + name: obj.Key, + url: this.#objectUrl(obj.Key), + lastModified: obj.LastModified, + etag: obj.ETag, + type: obj.Type, + size: Number(obj.Size), + storageClass: obj.StorageClass, + owner: { + id: obj.Owner.ID, + displayName: obj.Owner.DisplayName, + }, + })); + } + let prefixes = data.CommonPrefixes || null; + if (prefixes) { + if (!Array.isArray(prefixes)) { + prefixes = [ prefixes ]; + } + prefixes = prefixes.map((item: any) => item.Prefix); + } + return { + res, + objects, + prefixes: prefixes || [], + nextMarker: data.NextMarker || null, + isTruncated: data.IsTruncated === 'true', + } satisfies ListObjectResult; + } + + /** + * ListObjectsV2(GetBucketV2) + * @see https://help.aliyun.com/zh/oss/developer-reference/listobjectsv2 + */ + async listV2(query?: ListV2ObjectsQuery, options?: RequestOptions): Promise { + const params = this.#objectRequestParams('GET', '', options); + params.query = { + 'list-type': '2', + }; + const continuationToken = query?.['continuation-token'] ?? 
query?.continuationToken; + if (continuationToken) { + // should set subResource to add sign string + params.subResource = { + 'continuation-token': continuationToken, + }; + } + if (query?.prefix) { + params.query.prefix = query.prefix; + } + if (query?.delimiter) { + params.query.delimiter = query.delimiter; + } + if (query?.['max-keys']) { + params.query['max-keys'] = `${query['max-keys']}`; + } + if (query?.['start-after']) { + params.query['start-after'] = query['start-after']; + } + if (query?.['encoding-type']) { + params.query['encoding-type'] = query['encoding-type']; + } + if (query?.['fetch-owner']) { + params.query['fetch-owner'] = 'true'; + } + params.xmlResponse = true; + params.successStatuses = [ 200 ]; + + const { data, res } = await this.request(params); + let objects = data.Contents || []; + if (objects) { + if (!Array.isArray(objects)) { + objects = [ objects ]; + } + objects = objects.map((obj: any) => ({ + name: obj.Key, + url: this.#objectUrl(obj.Key), + lastModified: obj.LastModified, + etag: obj.ETag, + type: obj.Type, + size: Number(obj.Size), + storageClass: obj.StorageClass, + owner: obj.Owner ? 
{ + id: obj.Owner.ID, + displayName: obj.Owner.DisplayName, + } : undefined, + })); + } + let prefixes = data.CommonPrefixes || null; + if (prefixes) { + if (!Array.isArray(prefixes)) { + prefixes = [ prefixes ]; + } + prefixes = prefixes.map((item: any) => item.Prefix); + } + return { + res, + objects, + prefixes: prefixes || [], + isTruncated: data.IsTruncated === 'true', + keyCount: parseInt(data.KeyCount), + continuationToken: data.ContinuationToken, + nextContinuationToken: data.NextContinuationToken, + } satisfies ListV2ObjectResult; + } + + /** + * GetObject + * @see https://help.aliyun.com/zh/oss/developer-reference/getobject + */ + async get(name: string, options?: GetObjectOptions): Promise; + async get(name: string, file: string | Writable, options?: GetObjectOptions): Promise; + async get(name: string, file?: string | Writable | GetObjectOptions, options?: GetObjectOptions): Promise { + let writeStream: Writable | undefined; + let needDestroy = false; + + if (isWritable(file)) { + writeStream = file; + } else if (typeof file === 'string') { + writeStream = createWriteStream(file); + needDestroy = true; + } else { + // get(name, options) + options = file; + } + + options = this.#formatGetOptions(options); + let result: OSSResult; + try { + const params = this.#objectRequestParams('GET', name, options); + params.writeStream = writeStream; + params.successStatuses = [ 200, 206, 304 ]; + + result = await this.request(params); + if (needDestroy && writeStream) { + writeStream.destroy(); + } + } catch (err) { + if (needDestroy && writeStream) { + writeStream.destroy(); + // should delete the exists file before throw error + await fs.rm(file as string, { force: true }); + } + throw err; + } + + return { + res: result.res, + content: result.data, + }; + } + + async getStream(name: string, options?: GetStreamOptions): Promise { + options = this.#formatGetOptions(options); + const params = this.#objectRequestParams('GET', name, options); + params.streaming = 
true; + params.successStatuses = [ 200, 206, 304 ]; + const { res } = await this.request(params); + return { + stream: res, + res, + } satisfies GetStreamResult; + } + + /** + * PutObjectACL + * @see https://help.aliyun.com/zh/oss/developer-reference/putobjectacl + */ + async putACL(name: string, acl: ACLType, options?: PutACLOptions): Promise { + options = options ?? {}; + if (options.subres && !options.subResource) { + options.subResource = options.subres; + } + if (!options.subResource) { + options.subResource = {}; + } + options.subResource.acl = ''; + if (options.versionId) { + options.subResource.versionId = options.versionId; + } + options.headers = options.headers ?? {}; + options.headers['x-oss-object-acl'] = acl; + name = this.#objectName(name); + const params = this.#objectRequestParams('PUT', name, options); + params.successStatuses = [ 200 ]; + const { res } = await this.request(params); + return { + res, + } satisfies PutACLResult; + } + + /** + * GetObjectACL + * @see https://help.aliyun.com/zh/oss/developer-reference/getobjectacl + */ + async getACL(name: string, options?: GetACLOptions): Promise { + options = options ?? 
{}; + if (options.subres && !options.subResource) { + options.subResource = options.subres; + delete options.subres; + } + if (!options.subResource) { + options.subResource = {}; + } + options.subResource.acl = ''; + if (options.versionId) { + options.subResource.versionId = options.versionId; + } + name = this.#objectName(name); + + const params = this.#objectRequestParams('GET', name, options); + params.successStatuses = [ 200 ]; + params.xmlResponse = true; + + const { data, res } = await this.request(params); + return { + acl: data.AccessControlList.Grant, + owner: { + id: data.Owner.ID, + displayName: data.Owner.DisplayName, + }, + res, + } satisfies GetACLResult; + } + + // /** + // * Restore Object + // * @param {String} name the object key + // * @param {Object} options {type : Archive or ColdArchive} + // * @return {{res}} result + // */ + // proto.restore = async function restore(name, options = { type: 'Archive' }) { + // options = options || {}; + // options.subres = Object.assign({ restore: '' }, options.subres); + // if (options.versionId) { + // options.subres.versionId = options.versionId; + // } + // const params = this._objectRequestParams('POST', name, options); + // if (options.type === 'ColdArchive') { + // const paramsXMLObj = { + // RestoreRequest: { + // Days: options.Days ? options.Days : 2, + // JobParameters: { + // Tier: options.JobParameters ? 
options.JobParameters : 'Standard', + // }, + // }, + // }; + // params.content = obj2xml(paramsXMLObj, { + // headers: true, + // }); + // params.mime = 'xml'; + // } + // params.successStatuses = [ 202 ]; + + // const result = await this.request(params); + + // return { + // res: result.res, + // }; + // }; + + /** + * DeleteObject + * @see https://help.aliyun.com/zh/oss/developer-reference/deleteobject + */ + async delete(name: string, options?: DeleteObjectOptions): Promise { + const requestOptions = { + timeout: options?.timeout, + subResource: {} as Record, + }; + if (options?.versionId) { + requestOptions.subResource.versionId = options.versionId; + } + const params = this.#objectRequestParams('DELETE', name, requestOptions); + params.successStatuses = [ 204 ]; + const { res } = await this.request(params); + return { + res, + status: res.status, + headers: res.headers, + size: res.size, + rt: res.rt, + }; + } + + /** + * DeleteMultipleObjects + * @see https://help.aliyun.com/zh/oss/developer-reference/deletemultipleobjects + */ + async deleteMulti(namesOrObjects: string[] | DeleteMultipleObject[], options?: DeleteMultipleObjectOptions): Promise { + const objects: DeleteMultipleObjectXML[] = []; + assert(namesOrObjects.length > 0, 'namesOrObjects is empty'); + for (const nameOrObject of namesOrObjects) { + if (typeof nameOrObject === 'string') { + objects.push({ Key: this.#objectName(nameOrObject) }); + } else { + assert(nameOrObject.key, 'key is empty'); + objects.push({ Key: this.#objectName(nameOrObject.key), VersionId: nameOrObject.versionId }); + } + } + + const xml = json2xml({ + Delete: { + Quiet: !!options?.quiet, + Object: objects, + }, + }, { headers: true }); + + const requestOptions = { + timeout: options?.timeout, + // ?delete + subResource: { delete: '' } as Record, + }; + if (options?.versionId) { + requestOptions.subResource.versionId = options.versionId; + } + + const params = this.#objectRequestParams('POST', '', requestOptions); + 
params.mime = 'xml'; + params.content = Buffer.from(xml, 'utf-8'); + params.xmlResponse = true; + params.successStatuses = [ 200 ]; + const { data, res } = await this.request(params); + // quiet will return null + let deleted = data?.Deleted || []; + if (deleted) { + if (!Array.isArray(deleted)) { + deleted = [ deleted ]; + } + } + return { + res, + deleted, + } satisfies DeleteMultipleObjectResponse; + } + + /** + * HeadObject + * @see https://help.aliyun.com/zh/oss/developer-reference/headobject + */ + async head(name: string, options?: HeadObjectOptions): Promise { + options = options ?? {}; + if (options.subres && !options.subResource) { + options.subResource = options.subres; + } + if (options.versionId) { + if (!options.subResource) { + options.subResource = {}; + } + options.subResource.versionId = options.versionId; + } + const params = this.#objectRequestParams('HEAD', name, options); + params.successStatuses = [ 200, 304 ]; + const { res } = await this.request(params); + const meta: UserMeta = {}; + const result = { + meta, + res, + status: res.status, + } satisfies HeadObjectResult; + for (const k in res.headers) { + if (k.startsWith('x-oss-meta-')) { + const key = k.substring(11); + meta[key] = res.headers[k] as string; + } + } + return result; + } + + /** + * GetObjectMeta + * @see https://help.aliyun.com/zh/oss/developer-reference/getobjectmeta + */ + async getObjectMeta(name: string, options?: HeadObjectOptions) { + options = options ?? 
{}; + name = this.#objectName(name); + if (options.subres && !options.subResource) { + options.subResource = options.subres; + } + if (!options.subResource) { + options.subResource = {}; + } + if (options.versionId) { + options.subResource.versionId = options.versionId; + } + options.subResource.objectMeta = ''; + const params = this.#objectRequestParams('HEAD', name, options); + params.successStatuses = [ 200 ]; + const { res } = await this.request(params); + return { + status: res.status, + res, + }; + } + + /** + * PutSymlink + * @see https://help.aliyun.com/zh/oss/developer-reference/putsymlink + */ + async putSymlink(name: string, targetName: string, options: PutSymlinkOptions): Promise { + options = options ?? {}; + if (!options.subResource) { + options.subResource = {}; + } + options.subResource.symlink = ''; + if (options.versionId) { + options.subResource.versionId = options.versionId; + } + options.headers = options.headers ?? {}; + this.#convertMetaToHeaders(options.meta, options.headers); + + targetName = this.escape(this.#objectName(targetName)); + options.headers['x-oss-symlink-target'] = targetName; + if (options.storageClass) { + options.headers['x-oss-storage-class'] = options.storageClass; + } + + name = this.#objectName(name); + const params = this.#objectRequestParams('PUT', name, options); + + params.successStatuses = [ 200 ]; + const { res } = await this.request(params); + return { + res, + }; + } + + /** + * GetSymlink + * @see https://help.aliyun.com/zh/oss/developer-reference/getsymlink + */ + async getSymlink(name: string, options?: GetSymlinkOptions): Promise { + options = options ?? 
{}; + if (!options.subResource) { + options.subResource = {}; + } + options.subResource.symlink = ''; + if (options.versionId) { + options.subResource.versionId = options.versionId; + } + name = this.#objectName(name); + const params = this.#objectRequestParams('GET', name, options); + params.successStatuses = [ 200 ]; + const { res } = await this.request(params); + const target = res.headers['x-oss-symlink-target'] as string; + const meta: Record = {}; + for (const k in res.headers) { + if (k.startsWith('x-oss-meta-')) { + const key = k.substring(11); + meta[key] = res.headers[k] as string; + } + } + return { + targetName: decodeURIComponent(target), + res, + meta, + }; + } + + /** + * PutObjectTagging + * @see https://help.aliyun.com/zh/oss/developer-reference/putobjecttagging + */ + async putObjectTagging(name: string, tag: Record, options?: PutObjectTaggingOptions): Promise { + checkObjectTag(tag); + options = options ?? {}; + if (!options.subResource) { + options.subResource = {}; + } + options.subResource.tagging = ''; + if (options.versionId) { + options.subResource.versionId = options.versionId; + } + name = this.#objectName(name); + const params = this.#objectRequestParams('PUT', name, options); + params.successStatuses = [ 200 ]; + const tags: { Key: string; Value: string }[] = []; + for (const key in tag) { + tags.push({ Key: key, Value: tag[key] }); + } + + const paramXMLObj = { + Tagging: { + TagSet: { + Tag: tags, + }, + }, + }; + params.mime = 'xml'; + params.content = Buffer.from(json2xml(paramXMLObj)); + + const { res } = await this.request(params); + return { + res, + status: res.status, + }; + } + + /** + * GetObjectTagging + * @see https://help.aliyun.com/zh/oss/developer-reference/getobjecttagging + */ + async getObjectTagging(name: string, options?: GutObjectTaggingOptions): Promise { + options = options ?? 
{}; + if (!options.subResource) { + options.subResource = {}; + } + options.subResource.tagging = ''; + if (options.versionId) { + options.subResource.versionId = options.versionId; + } + name = this.#objectName(name); + const params = this.#objectRequestParams('GET', name, options); + params.successStatuses = [ 200 ]; + params.xmlResponse = true; + const { res, data } = await this.request(params); + // console.log(data.toString()); + let tags = data.TagSet?.Tag; + if (tags && !Array.isArray(tags)) { + tags = [ tags ]; + } + const tag: Record = {}; + if (tags) { + for (const item of tags) { + tag[item.Key] = item.Value; + } + } + return { + status: res.status, + res, + tag, + }; + } + + /** + * DeleteObjectTagging + * @see https://help.aliyun.com/zh/oss/developer-reference/deleteobjecttagging + */ + async deleteObjectTagging(name: string, options?: DeleteObjectTaggingOptions): Promise { + options = options ?? {}; + if (!options.subResource) { + options.subResource = {}; + } + options.subResource.tagging = ''; + if (options.versionId) { + options.subResource.versionId = options.versionId; + } + name = this.#objectName(name); + const params = this.#objectRequestParams('DELETE', name, options); + params.successStatuses = [ 204 ]; + const { res } = await this.request(params); + + return { + status: res.status, + res, + }; + } + + /** + * signatureUrl URL签名 + * @see https://help.aliyun.com/zh/oss/developer-reference/signed-urls + */ + signatureUrl(name: string, options?: SignatureUrlOptions) { + options = options ?? {}; + name = this.#objectName(name); + options.method = options.method ?? 'GET'; + const expires = options.expires ?? 
1800; + const expiresTimestamp = timestamp() + expires; + const params = { + bucket: this.#bucket, + object: name, + }; + const resource = this.getResource(params); + const signRes = signatureForURL(this.options.accessKeySecret, options, resource, expiresTimestamp); + + const url = this.getRequestURL({ + object: name, + subResource: { + OSSAccessKeyId: this.options.accessKeyId, + Expires: expiresTimestamp, + Signature: signRes.Signature, + ...signRes.subResource, + }, + }); + return url; + } + + async asyncSignatureUrl(name: string, options?: SignatureUrlOptions) { + return this.signatureUrl(name, options); + } + + /** + * Get Object url by name + * @param name - object name + * @param baseUrl - If provide `baseUrl`, will use `baseUrl` instead the default `endpoint and bucket`. + * return object url include bucket + */ + generateObjectUrl(name: string, baseUrl?: string) { + const urlObject = new URL(baseUrl ?? this.getRequestEndpoint()); + urlObject.pathname = this.escape(this.#objectName(name)); + return urlObject.toString(); + } + + /** + * @param policy specifies the validity of the fields in the request. + * + * return params.OSSAccessKeyId + * params.Signature + * params.policy JSON text encoded with UTF-8 and Base64. + */ + calculatePostSignature(policy: object | string) { + if (typeof policy !== 'object' && typeof policy !== 'string') { + throw new TypeError('policy must be JSON string or Object'); + } + const policyString = Buffer.from(policyToJSONString(policy), 'utf8').toString('base64'); + const Signature = computeSignature(this.options.accessKeySecret, policyString); + return { + OSSAccessKeyId: this.options.accessKeyId, + Signature, + policy: policyString, + }; + } + + /** + * Copy an object from sourceName to name. 
+ */ + async copy(name: string, sourceName: string, options?: CopyObjectOptions): Promise; + async copy(name: string, sourceName: string, sourceBucket: string, options?: CopyObjectOptions): Promise; + async copy(name: string, sourceName: string, sourceBucket?: string | CopyObjectOptions, options?: CopyObjectOptions): Promise { + if (typeof sourceBucket === 'object') { + options = sourceBucket; // 兼容旧版本,旧版本第三个参数为options + sourceBucket = undefined; + } + options = options ?? {}; + options.headers = options.headers ?? {}; + let hasMetadata = !!options.meta; + const REPLACE_HEADERS = [ + 'content-type', + 'content-encoding', + 'content-language', + 'content-disposition', + 'cache-control', + 'expires', + ]; + for (const key in options.headers) { + const lowerCaseKey = key.toLowerCase(); + options.headers[`x-oss-copy-source-${lowerCaseKey}`] = options.headers[key]; + if (REPLACE_HEADERS.includes(lowerCaseKey)) { + hasMetadata = true; + } + } + if (hasMetadata) { + options.headers['x-oss-metadata-directive'] = 'REPLACE'; + } + this.#convertMetaToHeaders(options.meta, options.headers); + + sourceName = this.#getCopySourceName(sourceName, sourceBucket); + if (options.versionId) { + sourceName = `${sourceName}?versionId=${options.versionId}`; + } + options.headers['x-oss-copy-source'] = sourceName; + const params = this.#objectRequestParams('PUT', name, options); + params.xmlResponse = true; + params.successStatuses = [ 200, 304 ]; + const { data, res } = await this.request(params); + return { + data: data ? { + etag: data.ETag ?? '', + lastModified: data.LastModified ?? 
'', + } : null, + res, + } satisfies CopyAndPutMetaResult; + } + + /** + * 另存为 + * @see https://help.aliyun.com/zh/oss/user-guide/sys-or-saveas + */ + async processObjectSave(sourceObject: string, targetObject: string, process: string, targetBucket?: string) { + targetObject = this.#objectName(targetObject); + const params = this.#objectRequestParams('POST', sourceObject, { + subResource: { + 'x-oss-process': '', + }, + }); + + const bucketParam = targetBucket ? `,b_${Buffer.from(targetBucket).toString('base64')}` : ''; + targetObject = Buffer.from(targetObject).toString('base64'); + const content = { + 'x-oss-process': `${process}|sys/saveas,o_${targetObject}${bucketParam}`, + }; + params.content = Buffer.from(querystring.stringify(content)); + params.successStatuses = [ 200 ]; + + const result = await this.request(params); + return { + res: result.res, + status: result.res.status, + }; + } + + /** protected methods */ + + protected getRequestEndpoint(): string { + return this.#bucketEndpoint; + } + + /** private methods */ + + #getCopySourceName(sourceName: string, bucketName?: string) { + if (typeof bucketName === 'string') { + sourceName = this.#objectName(sourceName); + } else if (sourceName[0] !== '/') { + bucketName = this.#bucket; + } else { + bucketName = sourceName.replace(/\/(.+?)(\/.*)/, '$1'); + sourceName = sourceName.replace(/(\/.+?\/)(.*)/, '$2'); + } + checkBucketName(bucketName); + sourceName = encodeURIComponent(sourceName); + sourceName = `/${bucketName}/${sourceName}`; + return sourceName; + } + + async #sendPutRequest(name: string, options: PutObjectOptions & { subResource?: Record }, + fileOrBufferOrStream: string | Buffer | Readable, method: RequestMethod = 'PUT') { + options.headers = options.headers ?? 
{}; + if (options.headers['Content-Type'] && !options.headers['content-type']) { + options.headers['content-type'] = options.headers['Content-Type'] as string; + delete options.headers['Content-Type']; + } + name = this.#objectName(name); + this.#convertMetaToHeaders(options.meta, options.headers); + // don't override exists headers + if (options.callback && !options.headers['x-oss-callback']) { + const callbackOptions = encodeCallback(options.callback); + options.headers['x-oss-callback'] = callbackOptions.callback; + if (callbackOptions.callbackVar) { + options.headers['x-oss-callback-var'] = callbackOptions.callbackVar; + } + } + const params = this.#objectRequestParams(method, name, options); + if (typeof fileOrBufferOrStream === 'string') { + const stats = await fs.stat(fileOrBufferOrStream); + if (!stats.isFile()) { + throw new TypeError(`${fileOrBufferOrStream} is not file`); + } + if (!options.mime) { + const mimeFromFile = mime.getType(fileOrBufferOrStream); + if (mimeFromFile) { + options.mime = mimeFromFile; + } + } + params.stream = createReadStream(fileOrBufferOrStream); + } else if (Buffer.isBuffer(fileOrBufferOrStream)) { + params.content = fileOrBufferOrStream; + } else { + params.stream = fileOrBufferOrStream; + } + params.mime = options.mime; + params.successStatuses = [ 200 ]; + + const { res, data } = await this.request(params); + const putResult = { + name, + url: this.#objectUrl(name), + res, + data: {}, + } satisfies PutObjectResult; + + if (params.headers?.['x-oss-callback']) { + putResult.data = JSON.parse(data.toString()); + } + + return putResult; + } + + #objectUrl(name: string) { + return this.getRequestURL({ object: name }); + } + + #formatGetOptions(options?: GetObjectOptions) { + options = options ?? 
{}; + // 兼容老的 subres 参数 + if (options.subres && !options.subResource) { + options.subResource = options.subres; + } + if (!options.subResource) { + options.subResource = {}; + } + + if (options.versionId) { + options.subResource.versionId = options.versionId; + } + if (options.process) { + options.subResource['x-oss-process'] = options.process; + } + return options; + } + + /** + * generator request params + */ + #objectRequestParams(method: RequestMethod, name: string, + options?: Pick) { + name = this.#objectName(name); + const params: OSSRequestParams = { + object: name, + bucket: this.#bucket, + method, + headers: options?.headers, + subResource: options?.subResource, + timeout: options?.timeout, + }; + return params; + } + + #objectName(name: string) { + return name.replace(/^\/+/, ''); + } + + #convertMetaToHeaders(meta: UserMeta | undefined, headers: IncomingHttpHeaders) { + if (!meta) { + return; + } + for (const key in meta) { + headers[`x-oss-meta-${key}`] = `${meta[key]}`; + } + } +} diff --git a/src/error/OSSClientError.ts b/src/error/OSSClientError.ts new file mode 100644 index 000000000..f0d363d08 --- /dev/null +++ b/src/error/OSSClientError.ts @@ -0,0 +1,19 @@ +const REQUEST_ID_KEY = 'request-id'; +const RESPONSE_CODE_KEY = 'response-code'; +const RESPONSE_HOST_KEY = 'response-host'; + +export class OSSClientError extends Error { + code: string; + status: number; + requestId?: string; + hostId?: string; + + constructor(status: number, code: string, message: string, requestId?: string, hostId?: string) { + super(`[${REQUEST_ID_KEY}=${requestId}, ${RESPONSE_CODE_KEY}=${code}, ${RESPONSE_HOST_KEY}=${hostId}] ${message}`); + this.status = status; + this.code = code; + this.name = 'OSSClientError'; + this.requestId = requestId; + this.hostId = hostId; + } +} diff --git a/src/error/index.ts b/src/error/index.ts new file mode 100644 index 000000000..fe2e10cee --- /dev/null +++ b/src/error/index.ts @@ -0,0 +1 @@ +export * from './OSSClientError.js'; diff 
--git a/src/index.ts b/src/index.ts new file mode 100644 index 000000000..e2f95cd3d --- /dev/null +++ b/src/index.ts @@ -0,0 +1,2 @@ +export * from './type/index.js'; +export * from './OSSObject.js'; diff --git a/src/type/Object.ts b/src/type/Object.ts new file mode 100644 index 000000000..abc74e741 --- /dev/null +++ b/src/type/Object.ts @@ -0,0 +1,171 @@ +import type { + DeleteObjectOptions, NormalSuccessResponse, OwnerType, RequestOptions, UserMeta, ListObjectResult, +} from 'oss-interface'; +import type { IncomingHttpHeaders } from 'urllib'; + +export interface DeleteMultipleObject { + key: string; + versionId?: string; +} + +export interface DeleteMultipleObjectXML { + Key: string; + VersionId?: string; +} + +export interface DeleteMultipleObjectOptions extends DeleteObjectOptions { + quiet?: boolean; +} + +export interface DeleteMultipleResponseObjectXML { + Key: string; + VersionId?: string; + DeleteMarker?: boolean; + DeleteMarkerVersionId?: string; +} + +export interface DeleteMultipleObjectResponse { + res: NormalSuccessResponse; + deleted: DeleteMultipleResponseObjectXML[]; +} + +export type ACLType = 'public-read-write' | 'public-read' | 'private'; + +export interface PutACLOptions extends RequestOptions { + versionId?: string; + /** additional parameters in url */ + subResource?: Record; + /** + * @alias subResource + * @deprecated + */ + subres?: Record; + headers?: IncomingHttpHeaders; +} + +export interface PutACLResult { + res: NormalSuccessResponse; +} + +export interface GetACLOptions extends RequestOptions { + versionId?: string; + /** additional parameters in url */ + subResource?: Record; + /** + * @alias subResource + * @deprecated + */ + subres?: Record; +} + +export interface GetACLResult { + acl: ACLType; + owner: OwnerType; + res: NormalSuccessResponse; +} + +export interface PutSymlinkOptions extends RequestOptions { + meta?: UserMeta; + versionId?: string; + storageClass?: string; + subResource?: Record; + headers?: IncomingHttpHeaders; 
+} + +export interface PutSymlinkResult { + res: NormalSuccessResponse; +} + +export interface GetSymlinkOptions extends RequestOptions { + versionId?: string; + subResource?: Record; +} + +export interface GetSymlinkResult { + targetName: string; + res: NormalSuccessResponse; + meta: Record; +} + +export interface PutObjectTaggingOptions extends RequestOptions { + versionId?: string; + subResource?: Record; +} + +export interface PutObjectTaggingResult { + res: NormalSuccessResponse; + status: number; +} + +export interface GutObjectTaggingOptions extends RequestOptions { + versionId?: string; + subResource?: Record; +} + +export interface GutObjectTaggingResult { + res: NormalSuccessResponse; + status: number; + tag: Record; +} + +export interface DeleteObjectTaggingOptions extends RequestOptions { + versionId?: string; + subResource?: Record; +} + +export interface DeleteObjectTaggingResult { + res: NormalSuccessResponse; + status: number; +} + +export interface AppendObjectOptions { + /** specify the position which is the content length of the latest object */ + position?: string | number; + /** the operation timeout */ + timeout?: number; + /** custom mime, will send with Content-Type entity header */ + mime?: string; + meta?: UserMeta; + headers?: IncomingHttpHeaders; +} + +export interface AppendObjectResult { + name: string; + /** the url of oss */ + url: string; + res: NormalSuccessResponse; + /** the next position */ + nextAppendPosition: string; +} + +export interface ListV2ObjectsQuery { + /** search object using prefix key */ + prefix?: string; + /** search start from token, including token key */ + 'continuation-token'?: string; + /** + * @alias 'continuation-token' + */ + continuationToken?: string; + /** only search current dir, not including subdir */ + delimiter?: string; + /** max objects, default is 100, limit to 1000 */ + 'max-keys'?: string | number; + /** + * The name of the object from which the list operation begins. 
+ * If this parameter is specified, objects whose names are alphabetically greater than the start-after parameter value are returned. + */ + 'start-after'?: string; + /** Specifies whether to include the information about object owners in the response. */ + 'fetch-owner'?: boolean; + /** Specifies that the object names in the response are URL-encoded. */ + 'encoding-type'?: 'url' | ''; +} + +export interface ListV2ObjectResult extends Omit { + keyCount: number; + /** prev index */ + continuationToken?: string; + /** next index */ + nextContinuationToken?: string; +} diff --git a/src/type/Request.ts b/src/type/Request.ts new file mode 100644 index 000000000..2a794c447 --- /dev/null +++ b/src/type/Request.ts @@ -0,0 +1,40 @@ +import type { Readable, Writable } from 'node:stream'; +import type { ListObjectsQuery } from 'oss-interface'; +import type { RawResponseWithMeta, IncomingHttpHeaders } from 'urllib'; + +export type RequestParameters = string | string[] | Record; +export type RequestQuery = Record | ListObjectsQuery; +export type RequestMethod = 'GET' | 'HEAD' | 'POST' | 'PUT' | 'PATCH' | 'DELETE'; + +export interface Request { + headers: IncomingHttpHeaders; + parameters?: RequestParameters; +} + +export interface OSSRequestParams { + method: RequestMethod; + headers?: IncomingHttpHeaders; + bucket?: string; + object?: string; + query?: RequestQuery; + mime?: string; + content?: Buffer; + disabledMD5?: boolean; + stream?: Readable; + writeStream?: Writable; + timeout?: number; + /** + * set request query params + * e.g.: + * - DELETE object `versionId` + */ + subResource?: RequestParameters; + xmlResponse?: boolean; + streaming?: boolean; + successStatuses?: number[]; +} + +export interface OSSResult { + data: T; + res: RawResponseWithMeta; +} diff --git a/src/type/index.ts b/src/type/index.ts new file mode 100644 index 000000000..77a027c20 --- /dev/null +++ b/src/type/index.ts @@ -0,0 +1,2 @@ +export * from './Object.js'; +export * from './Request.js'; diff 
--git a/lib/common/utils/checkBucketName.js b/src/util/checkBucketName.ts similarity index 50% rename from lib/common/utils/checkBucketName.js rename to src/util/checkBucketName.ts index c6f787cfd..07c089685 100644 --- a/lib/common/utils/checkBucketName.js +++ b/src/util/checkBucketName.ts @@ -1,6 +1,6 @@ -exports.checkBucketName = (name, createBucket = false) => { +export function checkBucketName(name: string, createBucket = false) { const bucketRegex = createBucket ? /^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$/ : /^[a-z0-9_][a-z0-9-_]{1,61}[a-z0-9_]$/; if (!bucketRegex.test(name)) { - throw new Error('The bucket must be conform to the specifications'); + throw new TypeError('The bucket must be conform to the specifications'); } -}; +} diff --git a/src/util/checkObjectTag.ts b/src/util/checkObjectTag.ts new file mode 100644 index 000000000..745bb9f71 --- /dev/null +++ b/src/util/checkObjectTag.ts @@ -0,0 +1,25 @@ +const ALLOW_STRING_RE = /^[a-zA-Z0-9 +-=._:/]+$/; + +export function checkObjectTag(tag: Record) { + if (typeof tag !== 'object') { + throw new TypeError('tag must be Object'); + } + const entries = Object.entries(tag); + if (entries.length > 10) { + throw new TypeError('maximum of 10 tags for a object'); + } + for (const [ key, value ] of entries) { + if (typeof key !== 'string' || typeof value !== 'string') { + throw new TypeError('the key and value of the tag must be String'); + } + if (!ALLOW_STRING_RE.test(key) || (value.length > 0 && !ALLOW_STRING_RE.test(value))) { + throw new TypeError('tag can contain letters, numbers, spaces, and the following symbols: plus sign (+), hyphen (-), equal sign (=), period (.), underscore (_), colon (:), and forward slash (/)'); + } + if (key.length < 1 || key.length > 128) { + throw new TypeError('tag key can be a minimum of 1 byte and a maximum of 128 bytes in length'); + } + if (value.length > 256) { + throw new TypeError('tag value can be a maximum of 256 bytes in length'); + } + } +} diff --git a/src/util/date.ts 
b/src/util/date.ts new file mode 100644 index 000000000..6382c90ba --- /dev/null +++ b/src/util/date.ts @@ -0,0 +1,7 @@ +/** + * Get Unix's timestamp in seconds + * 一个 Unix 时间戳(自UTC时间1970年01月01号开始的秒数) + */ +export function timestamp() { + return Math.round(Date.now() / 1000); +} diff --git a/src/util/encodeCallback.ts b/src/util/encodeCallback.ts new file mode 100644 index 000000000..582ea2643 --- /dev/null +++ b/src/util/encodeCallback.ts @@ -0,0 +1,33 @@ +import type { ObjectCallback } from 'oss-interface'; + +export interface CallbackOptions { + callback: string; + callbackVar?: string; +} + +export function encodeCallback(objectCallback: ObjectCallback) { + const data: Record = { + // must use encodeURI not encodeURIComponent + callbackUrl: encodeURI(objectCallback.url), + callbackBody: objectCallback.body, + }; + if (objectCallback.host) { + data.callbackHost = objectCallback.host; + } + if (objectCallback.contentType) { + data.callbackBodyType = objectCallback.contentType; + } + const callbackHeaderValue = Buffer.from(JSON.stringify(data)).toString('base64'); + const options: CallbackOptions = { + callback: callbackHeaderValue, + }; + + if (objectCallback.customValue) { + const callbackVar: Record = {}; + for (const key in objectCallback.customValue) { + callbackVar[`x:${key}`] = objectCallback.customValue[key].toString(); + } + options.callbackVar = Buffer.from(JSON.stringify(callbackVar)).toString('base64'); + } + return options; +} diff --git a/src/util/index.ts b/src/util/index.ts new file mode 100644 index 000000000..665adb335 --- /dev/null +++ b/src/util/index.ts @@ -0,0 +1,8 @@ +export * from './checkBucketName.js'; +export * from './checkObjectTag.js'; +export * from './date.js'; +export * from './encodeCallback.js'; +export * from './isIP.js'; +export * from './json2xml.js'; +export * from './policyToJSONString.js'; +export * from './sign.js'; diff --git a/src/util/isIP.ts b/src/util/isIP.ts new file mode 100644 index 000000000..c0bc5e4cb --- 
/dev/null +++ b/src/util/isIP.ts @@ -0,0 +1,5 @@ +import { isIP as _isIP } from 'node:net'; + +export function isIP(address: string) { + return _isIP(address) > 0; +} diff --git a/src/util/json2xml.ts b/src/util/json2xml.ts new file mode 100644 index 000000000..7237d23f2 --- /dev/null +++ b/src/util/json2xml.ts @@ -0,0 +1,26 @@ +import utility from 'utility'; + +export function json2xml(json: Record, options?: { headers: boolean }) { + let xml = ''; + if (options?.headers) { + xml = '<?xml version="1.0" encoding="UTF-8"?>\n'; + } + for (const key in json) { + const value = json[key]; + if (value === null || value === undefined) continue; + if (Array.isArray(value)) { + for (const item of value) { + xml += `<${key}>`; + xml += json2xml(item); + xml += `</${key}>`; + } + } else if (typeof value === 'object') { + xml += `<${key}>`; + xml += json2xml(value); + xml += `</${key}>`; + } else { + xml += `<${key}>${utility.escape(value.toString())}</${key}>`; + } + } + return xml; +} diff --git a/src/util/policyToJSONString.ts b/src/util/policyToJSONString.ts new file mode 100644 index 000000000..b5e5c99e0 --- /dev/null +++ b/src/util/policyToJSONString.ts @@ -0,0 +1,13 @@ +export function policyToJSONString(policy: object | string) { + let policyJSONString: string; + if (typeof policy === 'string') { + try { + policyJSONString = JSON.stringify(JSON.parse(policy)); + } catch (err: any) { + throw new TypeError(`Policy string is not a valid JSON: ${err.message}`); + } + } else { + policyJSONString = JSON.stringify(policy); + } + return policyJSONString; +} diff --git a/src/util/sign.ts b/src/util/sign.ts new file mode 100644 index 000000000..2002ebf75 --- /dev/null +++ b/src/util/sign.ts @@ -0,0 +1,153 @@ +import { debuglog } from 'node:util'; +import crypto from 'node:crypto'; +import type { IncomingHttpHeaders } from 'urllib'; +import type { SignatureUrlOptions } from 'oss-interface'; +import type { Request, RequestParameters } from '../type/Request.js'; +import { encodeCallback } from './encodeCallback.js'; + +const debug = 
debuglog('oss-client:sign'); +const OSS_PREFIX = 'x-oss-'; + +/** + * build canonicalized resource + * @see https://help.aliyun.com/zh/oss/developer-reference/include-signatures-in-the-authorization-header#section-rvv-dx2-xdb + */ +function buildCanonicalizedResource(resourcePath: string, parameters?: RequestParameters) { + let canonicalizedResource = `${resourcePath}`; + let separatorString = '?'; + + if (typeof parameters === 'string') { + if (parameters.trim()) { + canonicalizedResource += separatorString + parameters; + } + } else if (Array.isArray(parameters)) { + parameters.sort(); + canonicalizedResource += separatorString + parameters.join('&'); + } else if (parameters) { + const compareFunc = (entry1: string, entry2: string) => { + if (entry1[0] > entry2[0]) { + return 1; + } else if (entry1[0] < entry2[0]) { + return -1; + } + return 0; + }; + const processFunc = (key: string) => { + canonicalizedResource += separatorString + key; + if (parameters[key] || parameters[key] === 0) { + canonicalizedResource += `=${parameters[key]}`; + } + separatorString = '&'; + }; + Object.keys(parameters).sort(compareFunc).forEach(processFunc); + } + debug('canonicalizedResource: %o', canonicalizedResource); + return canonicalizedResource; +} + +function lowercaseKeyHeader(headers: IncomingHttpHeaders) { + const lowercaseHeaders: IncomingHttpHeaders = {}; + if (headers) { + for (const name in headers) { + lowercaseHeaders[name.toLowerCase()] = headers[name]; + } + } + return lowercaseHeaders; +} + +export function buildCanonicalString(method: string, resourcePath: string, request: Request, expiresTimestamp?: string) { + const headers = lowercaseKeyHeader(request.headers); + const headersToSign: IncomingHttpHeaders = {}; + const signContent: string[] = [ + method.toUpperCase(), + headers['content-md5'] as string ?? 
'', + headers['content-type']!, + expiresTimestamp || headers['x-oss-date'] as string, + ]; + + Object.keys(headers).forEach(key => { + if (key.startsWith(OSS_PREFIX)) { + headersToSign[key] = String(headers[key]).trim(); + } + }); + + Object.keys(headersToSign).sort().forEach(key => { + signContent.push(`${key}:${headersToSign[key]}`); + }); + signContent.push(buildCanonicalizedResource(resourcePath, request.parameters)); + + return signContent.join('\n'); +} + +export function computeSignature(accessKeySecret: string, canonicalString: string) { + const signature = crypto.createHmac('sha1', accessKeySecret); + return signature.update(Buffer.from(canonicalString)).digest('base64'); +} + +export function authorization(accessKeyId: string, accessKeySecret: string, canonicalString: string) { + // https://help.aliyun.com/zh/oss/developer-reference/include-signatures-in-the-authorization-header + return `OSS ${accessKeyId}:${computeSignature(accessKeySecret, canonicalString)}`; +} + +export function signatureForURL(accessKeySecret: string, options: SignatureUrlOptions, + resource: string, expiresTimestamp: number) { + const headers: Record = {}; + const subResource = options.subResource ?? 
{}; + + if (options.process) { + subResource['x-oss-process'] = options.process; + } + + if (options.trafficLimit) { + subResource['x-oss-traffic-limit'] = `${options.trafficLimit}`; + } + + if (options.response) { + const customResponseHeaders = options.response as Record; + for (const k in customResponseHeaders) { + subResource[`response-${k.toLowerCase()}`] = customResponseHeaders[k]; + } + } + + if (options['Content-MD5'] && !options['content-md5']) { + options['content-md5'] = options['Content-MD5']; + } + if (options['Content-Md5'] && !options['content-md5']) { + options['content-md5'] = options['Content-Md5']; + } + if (options['content-md5']) { + headers['content-md5'] = options['content-md5']; + } + if (options['Content-Type'] && !options['content-type']) { + options['content-type'] = options['Content-Type']; + } + if (options['content-type']) { + headers['content-type'] = options['content-type']; + } + + // copy other x-oss-* headers + for (const key in options) { + const lowerKey = key.toLowerCase(); + if (lowerKey.startsWith('x-oss-')) { + headers[lowerKey] = options[key]; + } + } + + if (options.callback) { + const callbackOptions = encodeCallback(options.callback); + subResource.callback = callbackOptions.callback; + if (callbackOptions.callbackVar) { + subResource['callback-var'] = callbackOptions.callbackVar; + } + } + + const canonicalString = buildCanonicalString(options.method!, resource, { + headers, + parameters: subResource, + }, `${expiresTimestamp}`); + + return { + Signature: computeSignature(accessKeySecret, canonicalString), + subResource, + }; +} diff --git a/test/OSSObject.test.ts b/test/OSSObject.test.ts new file mode 100644 index 000000000..09bf6738b --- /dev/null +++ b/test/OSSObject.test.ts @@ -0,0 +1,2261 @@ +import { strict as assert } from 'node:assert'; +import { fileURLToPath } from 'node:url'; +import { createReadStream, createWriteStream, existsSync, readFileSync } from 'node:fs'; +import { readFile, writeFile, stat } from 
'node:fs/promises'; +import { pipeline } from 'node:stream/promises'; +import path from 'node:path'; +import os from 'node:os'; +import { createHash, randomUUID } from 'node:crypto'; +import { ObjectMeta } from 'oss-interface'; +import urllib, { IncomingHttpHeaders, RawResponseWithMeta } from 'urllib'; +import config from './config.js'; +import { OSSObject } from '../src/index.js'; +import { OSSClientError } from '../src/error/OSSClientError.js'; +import { Readable } from 'node:stream'; + +describe('test/OSSObject.test.ts', () => { + const tmpdir = os.tmpdir(); + const prefix = config.prefix; + assert(config.oss.accessKeyId); + assert(config.oss.accessKeySecret); + const ossObject = new OSSObject(config.oss); + const __filename = fileURLToPath(import.meta.url); + const __dirname = path.dirname(__filename); + + describe('list()', () => { + // oss.jpg + // fun/test.jpg + // fun/movie/001.avi + // fun/movie/007.avi + const listPrefix = `${prefix}oss-client/list/`; + before(async () => { + await ossObject.put(`${listPrefix}oss.jpg`, Buffer.from('oss.jpg')); + await ossObject.put(`${listPrefix}fun/test.jpg`, Buffer.from('fun/test.jpg')); + await ossObject.put(`${listPrefix}fun/movie/001.avi`, Buffer.from('fun/movie/001.avi')); + await ossObject.put(`${listPrefix}fun/movie/007.avi`, Buffer.from('fun/movie/007.avi')); + await ossObject.put(`${listPrefix}other/movie/007.avi`, Buffer.from('other/movie/007.avi')); + await ossObject.put(`${listPrefix}other/movie/008.avi`, Buffer.from('other/movie/008.avi')); + }); + + function checkObjectProperties(obj: ObjectMeta) { + assert.equal(typeof obj.name, 'string'); + assert.equal(typeof obj.lastModified, 'string'); + assert.equal(typeof obj.etag, 'string'); + assert(obj.type === 'Normal' || obj.type === 'Multipart'); + assert.equal(typeof obj.size, 'number'); + assert.equal(obj.storageClass, 'Standard'); + assert.equal(typeof obj.owner, 'object'); + assert.equal(typeof obj.owner!.id, 'string'); + assert.equal(typeof 
obj.owner!.displayName, 'string'); + } + + it('should list with query', async () => { + const result = await ossObject.list(); + assert(result.objects.length > 0); + // console.log(result.objects); + result.objects.map(checkObjectProperties); + assert.equal(typeof result.nextMarker, 'string'); + assert(result.isTruncated); + assert.deepEqual(result.prefixes, []); + assert(result.res.headers.date); + const obj = result.objects[0]; + assert.match(obj.url, /^https:\/\//); + assert(obj.url.endsWith(`/${obj.name}`)); + assert(obj.owner!.id); + assert(obj.size > 0); + }); + + it('should list timeout work', async () => { + await assert.rejects(async () => { + await ossObject.list({}, { timeout: 1 }); + }, (err: Error) => { + assert.match(err.message, /Request timeout for 1 ms/); + assert.equal(err.name, 'HttpClientRequestTimeoutError'); + return true; + }); + }); + + it('should list only 1 object', async () => { + const result = await ossObject.list({ + 'max-keys': 1, + }); + assert(result.objects.length <= 1); + result.objects.map(checkObjectProperties); + assert.equal(typeof result.nextMarker, 'string'); + assert(result.isTruncated); + assert.deepEqual(result.prefixes, []); + assert(result.res.headers.date); + const obj = result.objects[0]; + assert.match(obj.url, /^https:\/\//); + assert(obj.url.endsWith(`/${obj.name}`)); + assert(obj.owner!.id); + assert(obj.size > 0); + }); + + it('should list top 3 objects', async () => { + const result = await ossObject.list({ + 'max-keys': 3, + }); + assert(result.objects.length <= 3); + result.objects.map(checkObjectProperties); + assert.equal(typeof result.nextMarker, 'string'); + assert(result.isTruncated); + assert.deepEqual(result.prefixes, []); + + // next 2 + const result2 = await ossObject.list({ + 'max-keys': '2', + marker: result.nextMarker, + }); + assert.equal(result2.objects.length, 2); + result.objects.map(checkObjectProperties); + assert.equal(typeof result2.nextMarker, 'string'); + assert(result2.isTruncated); + 
assert.deepEqual(result2.prefixes, []); + }); + + it('should list with prefix', async () => { + let result = await ossObject.list({ + prefix: `${listPrefix}fun/movie/`, + }); + assert.equal(result.objects.length, 2); + result.objects.map(checkObjectProperties); + assert.equal(result.nextMarker, null); + assert(!result.isTruncated); + assert.deepEqual(result.prefixes, []); + + result = await ossObject.list({ + prefix: `${listPrefix}fun/movie`, + }); + assert.equal(result.objects.length, 2); + result.objects.map(checkObjectProperties); + assert.equal(result.nextMarker, null); + assert(!result.isTruncated); + assert.deepEqual(result.prefixes, []); + }); + + it('should list current dir files only', async () => { + let result = await ossObject.list({ + prefix: listPrefix, + delimiter: '/', + }); + assert.equal(result.objects.length, 1); + result.objects.map(checkObjectProperties); + assert.equal(result.nextMarker, null); + assert(!result.isTruncated); + assert.deepEqual(result.prefixes, [ `${listPrefix}fun/`, `${listPrefix}other/` ]); + + result = await ossObject.list({ + prefix: `${listPrefix}fun/`, + delimiter: '/', + }); + assert.equal(result.objects.length, 1); + result.objects.map(checkObjectProperties); + assert.equal(result.nextMarker, null); + assert(!result.isTruncated); + assert.deepEqual(result.prefixes, [ `${listPrefix}fun/movie/` ]); + + result = await ossObject.list({ + prefix: `${listPrefix}fun/movie/`, + delimiter: '/', + }); + assert.equal(result.objects.length, 2); + result.objects.map(checkObjectProperties); + assert.equal(result.nextMarker, null); + assert(!result.isTruncated); + assert.deepEqual(result.prefixes, []); + }); + }); + + describe('listV2()', () => { + const listPrefix = `${prefix}oss-client/listV2/`; + before(async () => { + await ossObject.put(`${listPrefix}oss.jpg`, Buffer.from('oss.jpg')); + await ossObject.put(`${listPrefix}fun/test.jpg`, Buffer.from('fun/test.jpg')); + await ossObject.put(`${listPrefix}fun/movie/001.avi`, 
Buffer.from('fun/movie/001.avi')); + await ossObject.put(`${listPrefix}fun/movie/007.avi`, Buffer.from('fun/movie/007.avi')); + await ossObject.put(`${listPrefix}other/movie/007.avi`, Buffer.from('other/movie/007.avi')); + await ossObject.put(`${listPrefix}other/movie/008.avi`, Buffer.from('other/movie/008.avi')); + }); + + function checkObjectProperties(obj: ObjectMeta, options?: { owner: boolean }) { + assert.equal(typeof obj.name, 'string'); + assert.equal(typeof obj.lastModified, 'string'); + assert.equal(typeof obj.etag, 'string'); + assert(obj.type === 'Normal' || obj.type === 'Multipart'); + assert.equal(typeof obj.size, 'number'); + assert.equal(obj.storageClass, 'Standard'); + if (options?.owner) { + assert(typeof obj.owner!.id === 'string' && typeof obj.owner!.displayName === 'string'); + } else { + assert.equal(obj.owner, undefined); + } + } + + it('should list top 3 objects', async () => { + const result = await ossObject.listV2({ + 'max-keys': 1, + }); + assert.equal(result.objects.length, 1); + result.objects.forEach(obj => checkObjectProperties(obj)); + assert.equal(typeof result.nextContinuationToken, 'string'); + assert(result.isTruncated); + assert.deepEqual(result.prefixes, []); + assert.equal(result.keyCount, 1); + + // next 2 + const result2 = await ossObject.listV2({ + 'max-keys': '2', + continuationToken: result.nextContinuationToken, + }); + assert.equal(result2.objects.length, 2); + result.objects.forEach(obj => checkObjectProperties(obj)); + assert.equal(typeof result2.nextContinuationToken, 'string'); + assert(result2.isTruncated); + assert.deepEqual(result2.prefixes, []); + assert.equal(result2.keyCount, 2); + }); + + it('should list with prefix', async () => { + let result = await ossObject.listV2({ + prefix: `${listPrefix}fun/movie/`, + 'fetch-owner': true, + }); + assert.equal(result.objects.length, 2); + result.objects.forEach(obj => checkObjectProperties(obj, { owner: true })); + assert.equal(result.nextContinuationToken, 
undefined); + assert(!result.isTruncated); + assert.deepEqual(result.prefixes, []); + + result = await ossObject.listV2({ + prefix: `${listPrefix}fun/movie`, + }); + assert.equal(result.objects.length, 2); + result.objects.forEach(obj => checkObjectProperties(obj)); + assert.equal(result.nextContinuationToken, undefined); + assert(!result.isTruncated); + assert.deepEqual(result.prefixes, []); + }); + + it('should list current dir files only', async () => { + let result = await ossObject.listV2({ + prefix: listPrefix, + delimiter: '/', + }); + assert.equal(result.objects.length, 1); + result.objects.forEach(obj => checkObjectProperties(obj)); + assert.equal(result.nextContinuationToken, undefined); + assert(!result.isTruncated); + assert.deepEqual(result.prefixes, [ `${listPrefix}fun/`, `${listPrefix}other/` ]); + + result = await ossObject.listV2({ + prefix: `${listPrefix}fun/`, + delimiter: '/', + }); + assert.equal(result.objects.length, 1); + result.objects.forEach(obj => checkObjectProperties(obj)); + assert.equal(result.nextContinuationToken, undefined); + assert(!result.isTruncated); + assert.deepEqual(result.prefixes, [ `${listPrefix}fun/movie/` ]); + + result = await ossObject.listV2({ + prefix: `${listPrefix}fun/movie/`, + delimiter: '/', + }); + assert.equal(result.objects.length, 2); + result.objects.forEach(obj => checkObjectProperties(obj)); + assert.equal(result.nextContinuationToken, undefined); + assert(!result.isTruncated); + assert.deepEqual(result.prefixes, []); + }); + + it('should list with start-after', async () => { + let result = await ossObject.listV2({ + 'start-after': `${listPrefix}fun`, + 'max-keys': 1, + }); + assert(result.objects[0].name === `${listPrefix}fun/movie/001.avi`); + + result = await ossObject.listV2({ + 'start-after': `${listPrefix}fun/movie/001.avi`, + 'max-keys': 1, + }); + assert(result.objects[0].name === `${listPrefix}fun/movie/007.avi`); + + result = await ossObject.listV2({ + delimiter: '/', + prefix: 
`${listPrefix}fun/movie/`, + 'start-after': `${listPrefix}fun/movie/002.avi`, + }); + assert(result.objects.length === 1); + assert(result.objects[0].name === `${listPrefix}fun/movie/007.avi`); + + result = await ossObject.listV2({ + prefix: `${listPrefix}`, + 'max-keys': 5, + 'start-after': `${listPrefix}a`, + delimiter: '/', + }); + assert.equal(result.keyCount, 3); + assert.equal(result.objects.length, 1); + assert.equal(result.objects[0].name, `${listPrefix}oss.jpg`); + assert.equal(result.prefixes.length, 2); + assert.equal(result.prefixes[0], `${listPrefix}fun/`); + assert.equal(result.prefixes[1], `${listPrefix}other/`); + + result = await ossObject.listV2({ + prefix: `${listPrefix}`, + 'max-keys': 5, + 'start-after': `${listPrefix}oss.jpg`, + delimiter: '/', + }); + assert.equal(result.keyCount, 1); + assert.equal(result.objects.length, 0); + assert.equal(result.prefixes[0], `${listPrefix}other/`); + }); + + it('should list with continuation-token', async () => { + let nextContinuationToken: string | undefined; + let keyCount = 0; + do { + // eslint-disable-next-line no-await-in-loop + const result = await ossObject.listV2({ + prefix: listPrefix, + 'max-keys': 2, + 'continuation-token': nextContinuationToken, + }); + if (nextContinuationToken) { + // should has prev index + assert(result.continuationToken); + } + keyCount += result.keyCount; + nextContinuationToken = result.nextContinuationToken; + } while (nextContinuationToken); + assert.equal(keyCount, 6); + }); + }); + + describe('append()', () => { + const name = `/${prefix}oss-client/oss/append${Date.now()}`; + afterEach(async () => { + await ossObject.delete(name); + }); + + it('should append object with content buffer', async () => { + let object = await ossObject.append(name, Buffer.from('foo')); + assert.equal(object.res.status, 200); + assert.equal(object.nextAppendPosition, '3'); + assert.equal(object.res.headers['x-oss-next-append-position'], '3'); + assert(object.url); + assert(object.name); + 
+ let res = await ossObject.get(name); + assert.equal(res.content.toString(), 'foo'); + assert.equal(res.res.headers['x-oss-next-append-position'], '3'); + + object = await ossObject.append(name, Buffer.from('bar'), { + position: 3, + }); + assert.equal(object.res.status, 200); + assert.equal(object.nextAppendPosition, '6'); + assert.equal(object.res.headers['x-oss-next-append-position'], '6'); + + res = await ossObject.get(name); + assert.equal(res.content.toString(), 'foobar'); + assert.equal(res.res.headers['x-oss-next-append-position'], '6'); + + object = await ossObject.append(name, Buffer.from(', ok'), { + position: '6', + }); + assert.equal(object.res.status, 200); + assert.equal(object.nextAppendPosition, '10'); + assert.equal(object.res.headers['x-oss-next-append-position'], '10'); + + res = await ossObject.get(name); + assert.equal(res.content.toString(), 'foobar, ok'); + assert.equal(res.res.headers['x-oss-next-append-position'], '10'); + }); + + it('should append object with local file path', async () => { + const file = path.join(__dirname, 'fixtures/foo.js'); + let object = await ossObject.append(name, file); + assert.equal(object.nextAppendPosition, '16'); + + object = await ossObject.append(name, file, { position: 16 }); + assert.equal(object.nextAppendPosition, '32'); + }); + + it('should append object with readstream', async () => { + const file = path.join(__dirname, 'fixtures/foo.js'); + let object = await ossObject.append(name, createReadStream(file)); + assert.equal(object.nextAppendPosition, '16'); + + object = await ossObject.append(name, createReadStream(file), { + position: 16, + }); + assert.equal(object.nextAppendPosition, '32'); + }); + + it('should error when position not match', async () => { + await ossObject.append(name, Buffer.from('foo')); + await assert.rejects(async () => { + await ossObject.append(name, Buffer.from('foo')); + }, (err: OSSClientError) => { + assert.equal(err.name, 'OSSClientError'); + assert.equal(err.code, 
'PositionNotEqualToLength'); + assert.equal(err.status, 409); + assert.match(err.message, /Position is not equal to file length/); + return true; + }); + }); + + it('should use nextAppendPosition to append next', async () => { + let object = await ossObject.append(name, Buffer.from('foo')); + assert.equal(object.nextAppendPosition, '3'); + + object = await ossObject.append(name, Buffer.from('bar'), { + position: object.nextAppendPosition, + }); + + object = await ossObject.append(name, Buffer.from(', baz'), { + position: object.nextAppendPosition, + }); + assert.equal(object.nextAppendPosition, '11'); + + const res = await ossObject.get(name); + assert.equal(res.content.toString(), 'foobar, baz'); + assert.equal(res.res.headers['x-oss-next-append-position'], '11'); + }); + }); + + describe('mimetype', () => { + const createFile = async (filepath: string, size?: number) => { + size = size ?? 200 * 1024; + const rs = createReadStream('/dev/random', { + start: 0, + end: size - 1, + }); + await pipeline(rs, createWriteStream(filepath)); + return filepath; + }; + + it('should set mimetype by file ext', async () => { + const filepath = path.join(tmpdir, 'content-type-by-file.jpg'); + await createFile(filepath); + const name = `${prefix}oss-client/oss/content-type-by-file.png`; + await ossObject.put(name, filepath); + + const result = await ossObject.head(name); + assert.equal(result.res.headers['content-type'], 'image/jpeg'); + + // await ossObject.multipartUpload(name, filepath); + // result = await ossObject.head(name); + // assert.equal(result.res.headers['content-type'], 'image/jpeg'); + }); + + it('should set mimetype by object key', async () => { + const filepath = path.join(tmpdir, 'content-type-by-file'); + await createFile(filepath); + const name = `${prefix}oss-client/oss/content-type-by-file.png`; + await ossObject.put(name, filepath); + + const result = await ossObject.head(name); + assert.equal(result.res.headers['content-type'], 'image/png'); + // await 
ossObject.multipartUpload(name, filepath); + // result = await ossObject.head(name); + // assert.equal(result.res.headers['content-type'], 'image/png'); + }); + + it('should set user-specified mimetype', async () => { + const filepath = path.join(tmpdir, 'content-type-by-file.jpg'); + await createFile(filepath); + const name = `${prefix}oss-client/oss/content-type-by-file.png`; + await ossObject.put(name, filepath, { mime: 'text/plain' }); + + const result = await ossObject.head(name); + assert.equal(result.res.headers['content-type'], 'text/plain'); + // await ossObject.multipartUpload(name, filepath, { + // mime: 'text/plain', + // }); + // result = await ossObject.head(name); + // assert.equal(result.res.headers['content-type'], 'text/plain'); + }); + }); + + describe('put()', () => { + let name: string; + afterEach(async () => { + await ossObject.delete(name); + }); + + it('should add object with local file path', async () => { + name = `${prefix}oss-client/oss/put-localfile-${randomUUID()}.js`; + // put not exists name + const object = await ossObject.put(name, __filename); + assert.equal(object.res.status, 200); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + assert.equal(typeof object.res.rt, 'number'); + assert.equal(object.res.size, 0); + assert.equal(object.name, name); + + // put exists name + const object2 = await ossObject.put(name, __filename); + assert.equal(object.res.status, 200); + assert.equal(typeof object2.res.headers['x-oss-request-id'], 'string'); + assert.equal(typeof object2.res.rt, 'number'); + assert.equal(object2.res.size, 0); + assert.equal(object2.name, name); + + // put with callback fail + await assert.rejects(async () => { + await ossObject.put(name, __filename, { + callback: { + url: 'https://help.aliyun.com/zh/oss/support/0007-00000205', + body: 'foo=bar', + }, + }); + }, (err: OSSClientError) => { + assert.equal(err.name, 'OSSClientError'); + assert.equal(err.code, 'CallbackFailed'); + 
assert(err.hostId); + assert(err.requestId); + assert.match(err.message, /Response body is not valid json format\./); + return true; + }); + + // delete the new file + const result = await ossObject.delete(name); + assert.equal(result.res.status, 204); + }); + + it('should add object with content buffer', async () => { + name = `${prefix}oss-client/oss/put-buffer`; + const object = await ossObject.put(`/${name}`, Buffer.from('foo content')); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + assert.equal(typeof object.res.rt, 'number'); + assert.equal(object.name, name); + }); + + it('should add object with readstream', async () => { + name = `${prefix}oss-client/oss/put-readstream`; + const object = await ossObject.put(name, createReadStream(__filename)); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + assert.equal(typeof object.res.rt, 'number'); + assert.equal(typeof object.res.headers.etag, 'string'); + assert.equal(object.name, name); + }); + + it('should add object with Readable', async () => { + name = `${prefix}oss-client/oss/put-Readable`; + async function* generate() { + yield 'Hello, '; + yield '你好 OSS'; + } + const readable = Readable.from(generate()); + const object = await ossObject.put(name, readable, { + headers: { + 'content-length': Buffer.byteLength('Hello, 你好 OSS', 'utf-8').toString(), + }, + }); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + assert.equal(typeof object.res.rt, 'number'); + assert.equal(typeof object.res.headers.etag, 'string'); + assert.equal(object.name, name); + const result = await ossObject.get(name); + assert.equal(result.content.toString(), 'Hello, 你好 OSS'); + }); + + it('should add object with meta', async () => { + name = `${prefix}oss-client/oss/put-meta.js`; + const object = await ossObject.put(name, __filename, { + meta: { + uid: 1, + slus: 'test.html', + }, + }); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + 
assert.equal(typeof object.res.rt, 'number'); + assert.equal(object.res.size, 0); + assert.equal(object.name, name); + + const info = await ossObject.head(name); + assert.deepEqual(info.meta, { + uid: '1', + slus: 'test.html', + }); + assert.equal(info.status, 200); + }); + + it('should set Content-Disposition with ascii name', async () => { + name = `${prefix}oss-client/oss/put-Content-Disposition.js`; + const object = await ossObject.put(name, __filename, { + headers: { + 'Content-Disposition': 'ascii-name.js', + }, + }); + assert(object.name, name); + const info = await ossObject.head(name); + assert.equal(info.res.headers['content-disposition'], 'ascii-name.js'); + }); + + it('should set Content-Disposition with no-ascii name', async () => { + name = `${prefix}oss-client/oss/put-Content-Disposition.js`; + const object = await ossObject.put(name, __filename, { + headers: { + 'Content-Disposition': encodeURIComponent('non-ascii-名字.js'), + }, + }); + assert(object.name, name); + const info = await ossObject.head(name); + assert.equal(info.res.headers['content-disposition'], 'non-ascii-%E5%90%8D%E5%AD%97.js'); + }); + + it('should set Expires', async () => { + name = `${prefix}oss-client/oss/put-Expires.js`; + const object = await ossObject.put(name, __filename, { + headers: { + Expires: '1000000', + }, + }); + assert(object.name, name); + const info = await ossObject.head(name); + assert.equal(info.res.headers.expires, '1000000'); + }); + + it('should set custom Content-Type', async () => { + name = `${prefix}oss-client/oss/put-Content-Type.js`; + const object = await ossObject.put(name, __filename, { + headers: { + 'Content-Type': 'text/plain; charset=gbk', + }, + }); + assert(object.name, name); + const info = await ossObject.head(name); + assert.equal(info.res.headers['content-type'], 'text/plain; charset=gbk'); + }); + + it('should set custom content-type lower case', async () => { + name = `${prefix}oss-client/oss/put-content-type.js`; + const object = await 
ossObject.put(name, __filename, { + headers: { + 'content-type': 'application/javascript; charset=utf8', + }, + }); + assert(object.name, name); + const info = await ossObject.head(name); + assert.equal(info.res.headers['content-type'], 'application/javascript; charset=utf8'); + }); + + it('should set custom Content-MD5 and ignore case', async () => { + name = `test-md5-${Date.now()}.js`; + const content = Buffer.alloc(1024 * 4); + const MD5Value = createHash('md5').update(content).digest('base64'); + await ossObject.put(name, content, { + headers: { + 'Content-MD5': MD5Value, + }, + }); + await ossObject.put(name, content, { + headers: { + 'content-Md5': MD5Value, + }, + }); + }); + + it('should return correct encode when name include + and space', async () => { + name = `${prefix}ali-sdkhahhhh+oss+mm xxx.js`; + const object = await ossObject.put(name, __filename, { + headers: { + 'Content-Type': 'text/plain; charset=gbk', + }, + }); + assert(object.name, name); + const info = await ossObject.head(name); + const url = (info.res as any).requestUrls[0]; + const urlObject = new URL(url); + assert.equal(urlObject.pathname, `/${prefix}ali-sdkhahhhh%2Boss%2Bmm%20xxx.js`); + assert.equal(info.res.headers['content-type'], 'text/plain; charset=gbk'); + }); + + it('should work with x-oss-forbid-overwrite header to not allow put same name file', async () => { + const body = Buffer.from('san'); + name = `${prefix}put/testsan`; + const resultPut = await ossObject.put(name, body); + assert.equal(resultPut.res.status, 200); + await assert.rejects(async () => { + await ossObject.put(name, body, { + headers: { 'x-oss-forbid-overwrite': 'true' }, + }); + }, (err: OSSClientError) => { + assert.equal(err.name, 'OSSClientError'); + assert.equal(err.code, 'FileAlreadyExists'); + assert.match(err.message, /The object you specified already exists and can not be overwritten\./); + return true; + }); + }); + + it('should throw error when path is not file ', async () => { + const file = 
__dirname; + name = `${prefix}put/testpathnotfile`; + await assert.rejects(async () => { + await ossObject.put(name, file); + }, (err: Error) => { + assert.equal(`${__dirname} is not file`, err.message); + return true; + }); + }); + }); + + describe('putStream()', () => { + let name: string; + afterEach(async () => { + await ossObject.delete(name); + }); + + it('should add object with streaming way', async () => { + name = `${prefix}oss-client/oss/putStream-localfile.js`; + const object = await ossObject.putStream(name, createReadStream(__filename)); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + assert.equal(typeof object.res.rt, 'number'); + assert.equal(object.res.size, 0); + assert.equal(object.name, name); + assert(object.url); + + // check content + const r = await ossObject.get(name); + assert.equal(r.res.headers['content-type'], 'application/javascript'); + const stats = await stat(__filename); + assert.equal(r.res.headers['content-length'], `${stats.size}`); + assert.equal(r.res.status, 200); + assert((r.res as RawResponseWithMeta).timing.contentDownload > 0); + assert(r.content); + assert.equal(r.content.toString(), await readFile(__filename, 'utf8')); + }); + + it('should add image with file streaming way', async () => { + name = `${prefix}oss-client/oss/nodejs-1024x768.png`; + const imagePath = path.join(__dirname, 'nodejs-1024x768.png'); + const object = await ossObject.putStream(name, createReadStream(imagePath), { + mime: 'image/png', + }); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + assert.equal(typeof object.res.rt, 'number'); + assert.equal(object.res.size, 0); + assert.equal(object.name, name); + + // check content + const r = await ossObject.get(name); + // console.log(r.res.headers); + // { + // server: 'AliyunOSS', + // date: 'Sat, 22 Oct 2022 13:25:55 GMT', + // 'content-type': 'image/png', + // 'content-length': '502182', + // connection: 'keep-alive', + // 'x-oss-request-id': 
'6353EF633DE20A809D8088EA', + // 'accept-ranges': 'bytes', + // etag: '"39D12ED73B63BAAC31F980F555AE4FDE"', + // 'last-modified': 'Sat, 22 Oct 2022 13:25:55 GMT', + // 'x-oss-object-type': 'Normal', + // 'x-oss-hash-crc64ecma': '8835162692478804631', + // 'x-oss-storage-class': 'Standard', + // 'content-md5': 'OdEu1ztjuqwx+YD1Va5P3g==', + // 'x-oss-server-time': '14' + // } + assert.equal(r.res.status, 200); + assert.equal(r.res.headers['content-type'], 'image/png'); + const buf = await readFile(imagePath); + assert.equal(r.res.headers['content-length'], `${buf.length}`); + assert(r.content); + assert.equal(r.content.length, buf.length); + assert.deepEqual(r.content, buf); + }); + + it('should put object with http streaming way', async () => { + name = `${prefix}oss-client/oss/nodejs-1024x768.png`; + const nameCpy = `${prefix}oss-client/oss/nodejs-1024x768`; + const imagePath = path.join(__dirname, 'nodejs-1024x768.png'); + await ossObject.putStream(name, createReadStream(imagePath), { mime: 'image/png' }); + const signUrl = ossObject.signatureUrl(name, { expires: 3600 }); + const { res: httpStream, status } = await urllib.request(signUrl, { + dataType: 'stream', + }); + assert.equal(httpStream.headers['content-type'], 'image/png'); + assert.equal(httpStream.headers['content-length'], '502182'); + assert.equal(status, 200); + const putResult = await ossObject.putStream(nameCpy, httpStream); + assert.equal(putResult.res.status, 200); + const getResult = await ossObject.get(nameCpy); + assert.equal(getResult.res.status, 200); + assert.equal(getResult.res.headers['content-type'], 'application/octet-stream'); + assert.equal(getResult.res.headers['content-length'], httpStream.headers['content-length']); + assert.equal(getResult.res.headers.etag, putResult.res.headers.etag); + assert.equal(getResult.res.headers.etag, httpStream.headers.etag); + }); + + it('should add very big file: 4mb with streaming way', async () => { + name = `${prefix}oss-client/oss/bigfile-4mb.bin`; 
+ const bigFile = path.join(tmpdir, 'bigfile-4mb.bin'); + await writeFile(bigFile, Buffer.alloc(4 * 1024 * 1024).fill('a\n')); + const object = await ossObject.putStream(name, createReadStream(bigFile)); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + assert.equal(typeof object.res.rt, 'number'); + assert.equal(object.res.size, 0); + assert.equal(object.name, name); + + // check content + const r = await ossObject.get(name); + assert.equal(r.res.status, 200); + assert.equal(r.res.headers['content-type'], 'application/octet-stream'); + assert.equal(r.res.size, 4 * 1024 * 1024); + const buf = await readFile(bigFile); + assert(r.content); + assert.equal(r.content.length, buf.length); + assert.deepEqual(r.content, buf); + }); + + it('should throw error with stream destroy', async () => { + name = `${prefix}oss-client/oss/putStream-source-destroy.js`; + await assert.rejects(async () => { + const readerStream = createReadStream(`${__filename}.notexists.js`); + await ossObject.putStream(name, readerStream); + }, (err: any) => { + assert.strictEqual(err.status, -1); + return true; + }); + }); + }); + + describe('putMeta()', () => { + let name: string; + before(async () => { + name = `${prefix}oss-client/oss/putMeta.js`; + const object = await ossObject.put(name, __filename, { + meta: { + uid: 1, + pid: '123', + slus: 'test.html', + }, + }); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + }); + + after(async () => { + await ossObject.delete(name); + }); + + it('should update exists object meta', async () => { + await ossObject.putMeta(name, { + uid: '2', + }); + const info = await ossObject.head(name); + assert.equal(info.meta.uid, '2'); + assert(!info.meta.pid); + assert(!info.meta.slus); + }); + + it('should throw NoSuchKeyError when update not exists object meta', async () => { + await assert.rejects(async () => { + await ossObject.putMeta(`${name}not-exists`, { + uid: '2', + }); + }, (err: OSSClientError) => { + 
assert.equal(err.code, 'NoSuchKey'); + assert.equal(err.status, 404); + return true; + }); + }); + }); + + describe('putACL(), getACL()', () => { + let name: string; + after(async () => { + await ossObject.delete(name); + }); + + it('should put and get object ACL', async () => { + name = `${prefix}object/acl`; + const r1 = await ossObject.put(name, Buffer.from('hello world')); + assert.equal(r1.res.status, 200); + + const r2 = await ossObject.getACL(name); + assert.equal(r2.res.status, 200); + assert.equal(r2.acl, 'default'); + assert(r2.owner); + assert(r2.owner.displayName); + assert(r2.owner.id); + + const r3 = await ossObject.putACL(name, 'public-read'); + assert.equal(r3.res.status, 200); + + const r4 = await ossObject.getACL(name); + assert.equal(r4.res.status, 200); + assert.equal(r4.acl, 'public-read'); + + const r5 = await ossObject.get(name); + assert.equal(r5.res.status, 200); + assert.deepEqual(r5.content, Buffer.from('hello world')); + }); + }); + + describe('delete()', () => { + it('should delete exists object', async () => { + const name = `${prefix}oss-client/oss/delete.js`; + await ossObject.put(name, __filename); + + const info = await ossObject.delete(name); + assert.equal(info.res.status, 204); + assert.equal(info.status, 204); + + // await utils.throws(async () => { + // await store.head(name); + // }, 'NoSuchKeyError'); + }); + + it('should delete not exists object', async () => { + const info = await ossObject.delete(`not-exists-name-${randomUUID()}`); + assert.equal(info.res.status, 204); + }); + }); + + describe('deleteMulti()', () => { + const names: string[] = []; + beforeEach(async () => { + let name = `${prefix}oss-client/oss/deleteMulti0.js`; + names.push(name); + await ossObject.put(name, __filename); + + name = `${prefix}oss-client/oss/deleteMulti1.js`; + names.push(name); + await ossObject.put(name, __filename); + + name = `${prefix}oss-client/oss/deleteMulti2.js`; + names.push(name); + await ossObject.put(name, __filename); + }); + 
+ after(async () => { + for (const name of names) { + await ossObject.delete(name); + } + }); + + it('should delete 3 exists objs', async () => { + const result = await ossObject.deleteMulti(names); + assert.deepEqual( + result.deleted.map(v => v.Key), + names, + ); + assert.equal(result.res.status, 200); + }); + + it('should delete 2 exists and 2 not exists objs', async () => { + const result = await ossObject.deleteMulti(names.slice(0, 2).concat([ 'not-exist1', 'not-exist2' ])); + assert.deepEqual( + result.deleted.map(v => v.Key), + names.slice(0, 2).concat([ 'not-exist1', 'not-exist2' ]), + ); + assert.equal(result.res.status, 200); + }); + + it('should delete 1 exists objs', async () => { + const result = await ossObject.deleteMulti(names.slice(0, 1)); + assert.deepEqual( + result.deleted.map(v => v.Key), + names.slice(0, 1), + ); + assert.equal(result.res.status, 200); + }); + + it('should delete in quiet mode', async () => { + const result = await ossObject.deleteMulti(names, { + quiet: true, + }); + assert.equal(result.deleted.length, 0); + assert.equal(result.res.status, 200); + }); + }); + + describe('head()', () => { + let name: string; + let resHeaders: IncomingHttpHeaders; + before(async () => { + name = `${prefix}oss-client/oss/head-meta.js`; + const object = await ossObject.put(name, __filename, { + meta: { + uid: 1, + pid: '123', + slus: 'test.html', + }, + }); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + resHeaders = object.res.headers; + }); + + after(async () => { + await ossObject.delete(name); + }); + + it('should head not exists object throw NoSuchKey', async () => { + await assert.rejects(async () => { + await ossObject.head(`${name}not-exists`); + }, (err: OSSClientError) => { + assert.equal(err.name, 'OSSClientError'); + assert.equal(err.code, 'NoSuchKey'); + assert.equal(err.status, 404); + assert.equal(typeof err.requestId, 'string'); + return true; + }); + }); + + it('should head exists object with 
If-Modified-Since < object modified time', async () => { + const lastYear = new Date(resHeaders.date!); + lastYear.setFullYear(lastYear.getFullYear() - 1); + const info = await ossObject.head(name, { + headers: { + 'If-Modified-Since': lastYear.toUTCString(), + }, + }); + assert.equal(info.status, 200); + assert(info.meta); + assert.deepEqual(info.meta, { pid: '123', slus: 'test.html', uid: '1' }); + }); + + it('should head exists object with If-Modified-Since = object modified time', async () => { + const info = await ossObject.head(name, { + headers: { + 'If-Modified-Since': resHeaders.date, + }, + }); + assert.equal(info.status, 304); + assert.deepEqual(info.meta, { pid: '123', slus: 'test.html', uid: '1' }); + }); + + it('should head exists object with If-Modified-Since > object modified time', async () => { + const nextYear = new Date(resHeaders.date!); + nextYear.setFullYear(nextYear.getFullYear() + 1); + + const info = await ossObject.head(name, { + headers: { + 'If-Modified-Since': nextYear.toUTCString(), + }, + }); + assert.equal(info.status, 304); + assert.deepEqual(info.meta, { pid: '123', slus: 'test.html', uid: '1' }); + }); + + it('should head exists object with If-Unmodified-Since < object modified time', async () => { + const lastYear = new Date(resHeaders.date!); + lastYear.setFullYear(lastYear.getFullYear() - 1); + await assert.rejects(async () => { + await ossObject.head(name, { + headers: { + 'If-Unmodified-Since': lastYear.toUTCString(), + }, + }); + }, (err: OSSClientError) => { + assert.equal(err.name, 'OSSClientError'); + assert.equal(err.code, 'PreconditionFailed'); + assert.equal(err.status, 412); + return true; + }); + }); + + it('should head exists object with If-Unmodified-Since = object modified time', async () => { + const info = await ossObject.head(name, { + headers: { + 'If-Unmodified-Since': resHeaders.date, + }, + }); + assert.equal(info.status, 200); + assert(info.meta); + }); + + it('should head exists object with 
If-Unmodified-Since > object modified time', async () => { + const nextYear = new Date(resHeaders.date!); + nextYear.setFullYear(nextYear.getFullYear() + 1); + const info = await ossObject.head(name, { + headers: { + 'If-Unmodified-Since': nextYear.toUTCString(), + }, + }); + assert.equal(info.status, 200); + assert(info.meta); + }); + + it('should head exists object with If-Match equal etag', async () => { + const info = await ossObject.head(name, { + headers: { + 'If-Match': resHeaders.etag, + }, + }); + assert.equal(info.meta.uid, '1'); + assert.equal(info.meta.pid, '123'); + assert.equal(info.meta.slus, 'test.html'); + assert.equal(info.status, 200); + }); + + it('should head exists object with If-Match not equal etag', async () => { + await assert.rejects(async () => { + await ossObject.head(name, { + headers: { + 'If-Match': '"foo-etag"', + }, + }); + }, (err: OSSClientError) => { + assert.equal(err.name, 'OSSClientError'); + assert.equal(err.code, 'PreconditionFailed'); + assert.equal(err.status, 412); + return true; + }); + }); + + it('should head exists object with If-None-Match equal etag', async () => { + const info = await ossObject.head(name, { + headers: { + 'If-None-Match': resHeaders.etag, + }, + }); + assert(info.meta); + assert.equal(info.status, 304); + }); + + it('should head exists object with If-None-Match not equal etag', async () => { + const info = await ossObject.head(name, { + headers: { + 'If-None-Match': '"foo-etag"', + }, + }); + assert.equal(info.meta.uid, '1'); + assert.equal(info.meta.pid, '123'); + assert.equal(info.meta.slus, 'test.html'); + assert.equal(info.status, 200); + }); + }); + + describe('signatureUrl() and asyncSignatureUrl()', () => { + let name: string; + let needEscapeName: string; + before(async () => { + name = `${prefix}oss-client/oss/signatureUrl.js`; + let object = await ossObject.put(name, __filename, { + meta: { + uid: 1, + pid: '123', + slus: 'test.html', + }, + }); + assert.equal(typeof 
object.res.headers['x-oss-request-id'], 'string'); + + needEscapeName = `${prefix}oss-client/oss/%3get+meta-signatureUrl.js`; + object = await ossObject.put(needEscapeName, __filename, { + meta: { + uid: 1, + pid: '123', + slus: 'test.html', + }, + }); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + }); + + after(async () => { + await ossObject.delete(name); + }); + + it('should signature url get object ok', async () => { + const result = await ossObject.get(name); + const url = ossObject.signatureUrl(name); + const urlRes = await urllib.request(url); + assert.equal(urlRes.status, 200); + assert.equal(urlRes.data.toString(), result.content.toString()); + }); + + it('should asyncSignatureUrl get object ok', async () => { + const result = await ossObject.get(name); + const url = await ossObject.asyncSignatureUrl(name); + const urlRes = await urllib.request(url); + assert.equal(urlRes.status, 200); + assert.equal(urlRes.data.toString(), result.content.toString()); + }); + + it('should signature url with response limitation', () => { + const response = { + 'content-type': 'xml', + 'content-language': 'zh-cn', + }; + const url = ossObject.signatureUrl(name, { response }); + assert(url.includes('response-content-type=xml')); + assert(url.includes('response-content-language=zh-cn')); + }); + + it('should signature url with options contains other parameters', async () => { + const options = { + expires: 3600, + subResource: { + 'x-oss-process': 'image/resize,w_20', + }, + // others parameters + filename: 'test.js', + testParameters: 'xxx', + }; + const imageName = `${prefix}oss-client/oss/nodejs-test-signature-1024x768.png`; + const originImagePath = path.join(__dirname, 'nodejs-1024x768.png'); + await ossObject.put(imageName, originImagePath, { + mime: 'image/png', + }); + + const signUrl = ossObject.signatureUrl(imageName, options); + assert.match(signUrl, /x-oss-process=image%2Fresize%2Cw_20/); + const urlRes = await urllib.request(signUrl); + 
assert.equal(urlRes.status, 200); + }); + + it('should signature url with image processed and get object ok', async () => { + const imageName = `${prefix}oss-client/oss/nodejs-test-signature-1024x768.png`; + const originImagePath = path.join(__dirname, 'nodejs-1024x768.png'); + await ossObject.put(imageName, originImagePath, { + mime: 'image/png', + }); + + const signUrl = ossObject.signatureUrl(imageName, { expires: 3600, process: 'image/resize,w_200' }); + assert.match(signUrl, /x-oss-process=image%2Fresize%2Cw_200/); + const urlRes = await urllib.request(signUrl); + assert.equal(urlRes.status, 200); + }); + + it('should signature url for PUT', async () => { + const putString = 'Hello World'; + const contentMD5 = createHash('md5').update(Buffer.from(putString, 'utf8')).digest('base64'); + const url = ossObject.signatureUrl(name, { + method: 'PUT', + 'Content-Type': 'text/plain; charset=UTF-8', + 'Content-Md5': contentMD5, + }); + const headers = { + 'Content-Type': 'text/plain; charset=UTF-8', + 'Content-MD5': contentMD5, + }; + // console.log('%o', url); + const res = await urllib.request(url, { method: 'PUT', data: putString, headers, dataType: 'text' }); + // console.log(res.data); + assert.equal(res.status, 200); + const headRes = await ossObject.head(name); + assert.equal(headRes.status, 200); + assert.equal(headRes.res.headers.etag, + `"${Buffer.from(contentMD5, 'base64').toString('hex').toUpperCase()}"`); + }); + + it('should signature url get need escape object ok', async () => { + const result = await ossObject.get(needEscapeName); + const url = ossObject.signatureUrl(needEscapeName); + const urlRes = await urllib.request(url); + assert.equal(urlRes.data.toString(), result.content.toString()); + }); + + it('should signature url with custom host ok', async () => { + const tempStore = new OSSObject({ + ...config.oss, + endpoint: 'http://www.aliyun.com', + }); + + const url = tempStore.signatureUrl(name); + // 
http://${bucket}.www.aliyun.com/darwin-v4.4.2/oss-client/oss/get-meta.js?OSSAccessKeyId= + assert.match(url, /http:\/\/.+?\.www\.aliyun.com\//); + }); + + it('should signature url with traffic limit', async () => { + const limitName = `${prefix}oss-client/oss/trafficLimit.js`; + const size = 1 * 1024 * 1024; + const content1mb = Buffer.alloc(size).fill('a\n'); + + let url = ossObject.signatureUrl(limitName, { + trafficLimit: 8 * 1024 * 100 * 4, + method: 'PUT', + }); + let result = await urllib.request(url, { + method: 'PUT', + content: content1mb, + timeout: 600000, + }); + assert.equal(200, result.status); + + url = ossObject.signatureUrl(limitName, { + trafficLimit: 8 * 1024 * 100 * 4, + }); + result = await urllib.request(url, { + timeout: 600000, + }); + assert.equal(200, result.status); + assert.equal(result.headers['content-length'], size.toString()); + }); + }); + + describe('get()', () => { + let name: string; + let resHeaders: IncomingHttpHeaders; + let needEscapeName: string; + before(async () => { + name = `${prefix}oss-client/oss/get-meta.js`; + let object = await ossObject.put(name, __filename, { + meta: { + uid: 1, + pid: '123', + slus: 'test.html', + }, + }); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + resHeaders = object.res.headers; + + needEscapeName = `${prefix}oss-client/oss/%3get+meta.js`; + object = await ossObject.put(needEscapeName, __filename, { + meta: { + uid: 1, + pid: '123', + slus: 'test.html', + }, + }); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + }); + + after(async () => { + await ossObject.delete(name); + }); + + it('should store object to local file', async () => { + const savePath = path.join(tmpdir, name.replace(/\//g, '-')); + const result = await ossObject.get(name, savePath); + assert.equal(result.res.status, 200); + assert(!(result.res as any).requestUrls[0].includes('response-cache-control=no-cache')); + assert.equal((await stat(savePath)).size, (await 
stat(__filename)).size); + }); + + it('should escape uri path ok', async () => { + const savePath = path.join(tmpdir, needEscapeName.replace(/\//g, '-')); + const result = await ossObject.get(needEscapeName, savePath); + assert.equal(result.res.status, 200); + assert.equal((await stat(savePath)).size, (await stat(__filename)).size); + }); + + it.skip('should throw error when save path parent dir not exists', async () => { + const savePath = path.join(tmpdir, 'not-exists', name.replace(/\//g, '-')); + await assert.rejects(async () => { + await ossObject.get(name, savePath); + }, (err: Error) => { + assert(err.message.includes('ENOENT')); + return true; + }); + }); + + it('should store object to writeStream', async () => { + const savePath = path.join(tmpdir, name.replace(/\//g, '-')); + const result = await ossObject.get(name, createWriteStream(savePath)); + assert.equal(result.res.status, 200); + assert.equal((await stat(savePath)).size, (await stat(__filename)).size); + }); + + it('should store not exists object to file', async () => { + const savePath = path.join(tmpdir, name.replace(/\//g, '-')); + await assert.rejects(async () => { + await ossObject.get(`${name}not-exists`, savePath); + }, (err: OSSClientError) => { + assert.equal(err.code, 'NoSuchKey'); + assert.equal(err.status, 404); + assert(!existsSync(savePath)); + return true; + }); + }); + + it.skip('should throw error when writeStream emit error', async () => { + const savePath = path.join(tmpdir, 'not-exists-dir', name.replace(/\//g, '-')); + await assert.rejects(async () => { + await ossObject.get(name, createWriteStream(savePath)); + }, (err: OSSClientError) => { + assert.equal(err.code, 'NoSuchKey'); + assert.equal(err.status, 404); + assert(!existsSync(savePath)); + return true; + }); + }); + + it('should get object content buffer', async () => { + let result = await ossObject.get(name); + assert(Buffer.isBuffer(result.content), 'content should be Buffer'); + 
assert(result.content.toString().includes('oss-client/oss/get-meta.js')); + + result = await ossObject.get(name, undefined); + assert(Buffer.isBuffer(result.content), 'content should be Buffer'); + assert(result.content.toString().includes('oss-client/oss/get-meta.js')); + }); + + it('should get object content buffer with image process', async () => { + const imageName = `${prefix}oss-client/oss/nodejs-test-get-image-1024x768.png`; + const originImagePath = path.join(__dirname, 'nodejs-1024x768.png'); + path.join(__dirname, 'nodejs-processed-w200.png'); + await ossObject.put(imageName, originImagePath, { + mime: 'image/png', + }); + + let result = await ossObject.get(imageName, { process: 'image/resize,w_200' }); + assert.equal(result.res.status, 200); + assert(Buffer.isBuffer(result.content), 'content should be Buffer'); + // assert.deepEqual(result.content == fs.readFileSync(processedImagePath), + // 'get content should be same as test/nodejs-processed-w200.png'); + + // it should use the value of process + // when 'subres.x-oss-process' coexists with 'process'. 
+ result = await ossObject.get(imageName, { + process: 'image/resize,w_200', + subres: { 'x-oss-process': 'image/resize,w_100' }, + }); + assert.equal(result.res.status, 200); + assert(Buffer.isBuffer(result.content), 'content should be Buffer'); + }); + + it('should throw NoSuchKeyError when object not exists', async () => { + await assert.rejects(async () => { + await ossObject.get('not-exists-key'); + }, (err: OSSClientError) => { + assert.equal(err.code, 'NoSuchKey'); + assert.equal(err.status, 404); + assert.equal(typeof err.requestId, 'string'); + assert.match(err.message, /The specified key does not exist\./); + return true; + }); + }); + + describe('If-Modified-Since header', () => { + it('should 200 when If-Modified-Since < object modified time', async () => { + const lastYear = new Date(resHeaders.date!); + lastYear.setFullYear(lastYear.getFullYear() - 1); + const result = await ossObject.get(name, { + headers: { + 'If-Modified-Since': lastYear.toUTCString(), + }, + }); + assert(Buffer.isBuffer(result.content), 'content should be Buffer'); + assert(result.content.toString().indexOf('oss-client/oss/get-meta.js') > 0); + assert.equal(result.res.status, 200); + }); + + it('should 304 when If-Modified-Since = object modified time', async () => { + const result = await ossObject.get(name, { + headers: { + 'If-Modified-Since': resHeaders.date, + }, + }); + assert(Buffer.isBuffer(result.content), 'content should be Buffer'); + assert.equal(result.content.length, 0); + assert.equal(result.res.status, 304); + }); + + it('should 304 when If-Modified-Since > object modified time', async () => { + const nextYear = new Date(resHeaders.date!); + nextYear.setFullYear(nextYear.getFullYear() + 1); + const result = await ossObject.get(name, { + headers: { + 'If-Modified-Since': nextYear.toUTCString(), + }, + }); + assert(Buffer.isBuffer(result.content), 'content should be Buffer'); + assert.equal(result.content.length, 0); + assert.equal(result.res.status, 304); + }); + 
}); + + describe('If-Unmodified-Since header', () => { + it('should throw PreconditionFailedError when If-Unmodified-Since < object modified time', async () => { + const lastYear = new Date(resHeaders.date!); + lastYear.setFullYear(lastYear.getFullYear() - 1); + await assert.rejects(async () => { + await ossObject.get(name, { + headers: { + 'If-Unmodified-Since': lastYear.toUTCString(), + }, + }); + }, (err: OSSClientError) => { + assert.equal(err.status, 412); + assert.equal(err.code, 'PreconditionFailed'); + assert.match(err.message, + /At least one of the pre-conditions you specified did not hold. \(condition=If-Unmodified-Since\)/); + assert.equal(typeof err.requestId, 'string'); + assert.equal(typeof err.hostId, 'string'); + return true; + }); + }); + + it('should 200 when If-Unmodified-Since = object modified time', async () => { + const result = await ossObject.get(name, { + headers: { + 'If-Unmodified-Since': resHeaders.date, + }, + }); + assert.equal(result.res.status, 200); + assert(Buffer.isBuffer(result.content), 'content should be Buffer'); + assert(result.content.toString().indexOf('oss-client/oss/get-meta.js') > 0); + }); + + it('should 200 when If-Unmodified-Since > object modified time', async () => { + const nextYear = new Date(resHeaders.date!); + nextYear.setFullYear(nextYear.getFullYear() + 1); + const result = await ossObject.get(name, { + headers: { + 'If-Unmodified-Since': nextYear.toUTCString(), + }, + }); + assert.equal(result.res.status, 200); + assert(Buffer.isBuffer(result.content), 'content should be Buffer'); + assert(result.content.toString().indexOf('oss-client/oss/get-meta.js') > 0); + }); + }); + + describe('If-Match header', () => { + it('should 200 when If-Match equal object etag', async () => { + const result = await ossObject.get(name, { + headers: { + 'If-Match': resHeaders.etag, + }, + }); + assert.equal(result.res.status, 200); + }); + + it('should throw PreconditionFailedError when If-Match not equal object etag', async () 
=> { + await assert.rejects(async () => { + await ossObject.get(name, { + headers: { + 'If-Match': 'foo', + }, + }); + }, (err: OSSClientError) => { + assert.equal(err.code, 'PreconditionFailed'); + assert.equal(err.status, 412); + return true; + }); + }); + }); + + describe('If-None-Match header', () => { + it('should 200 when If-None-Match not equal object etag', async () => { + const result = await ossObject.get(name, { + headers: { + 'If-None-Match': 'foo', + }, + }); + assert.equal(result.res.status, 200); + }); + + it('should 304 when If-None-Match equal object etag', async () => { + const result = await ossObject.get(name, { + headers: { + 'If-None-Match': resHeaders.etag, + }, + }); + assert.equal(result.res.status, 304); + assert.equal(result.content.length, 0); + }); + }); + + describe('Range header', () => { + it('should work with Range header and get top 10 bytes content', async () => { + const content = Buffer.from('aaaaaaaaaabbbbbbbbbb'); + await ossObject.put('range-header-test', content); + const result = await ossObject.get('range-header-test', { + headers: { + Range: 'bytes=0-9', + }, + }); + assert.equal(result.res.headers['content-length'], '10'); + assert(Buffer.isBuffer(result.content), 'content should be Buffer'); + assert.equal(result.content.toString(), 'aaaaaaaaaa'); + }); + }); + }); + + describe('getStream()', () => { + let name: string; + before(async () => { + name = `${prefix}oss-client/oss/get-stream.js`; + await ossObject.put(name, __filename, { + meta: { + uid: 1, + pid: '123', + slus: 'test.html', + }, + }); + }); + + after(async () => { + await ossObject.delete(name); + }); + + it('should get exists object stream', async () => { + const result = await ossObject.getStream(name); + assert.equal(result.res.status, 200); + assert(result.stream instanceof Readable); + const tmpfile = path.join(tmpdir, 'get-stream.js'); + const tmpStream = createWriteStream(tmpfile); + + function finish() { + return new Promise(resolve => { + 
tmpStream.on('finish', () => { + resolve(); + }); + }); + } + + result.stream.pipe(tmpStream); + await finish(); + assert.equal(readFileSync(tmpfile, 'utf8'), readFileSync(__filename, 'utf8')); + }); + + /** + * Image processing uses different compression algorithms, + * and the performance may be inconsistent + * between different regions + */ + it('should get image stream with image process', async () => { + const imageName = `${prefix}oss-client/oss/nodejs-test-getstream-image-1024x768.png`; + const originImagePath = path.join(__dirname, 'nodejs-1024x768.png'); + await ossObject.put(imageName, originImagePath, { + mime: 'image/png', + }); + + let result = await ossObject.getStream(imageName, { process: 'image/resize,w_200' }); + let result2 = await ossObject.getStream(imageName, { process: 'image/resize,w_200' }); + assert.equal(result.res.status, 200); + assert.equal(result2.res.status, 200); + result = await ossObject.getStream(imageName, { + process: 'image/resize,w_200', + subres: { 'x-oss-process': 'image/resize,w_100' }, + }); + result2 = await ossObject.getStream(imageName, { + process: 'image/resize,w_200', + subres: { 'x-oss-process': 'image/resize,w_100' }, + }); + assert.equal(result.res.status, 200); + assert.equal(result2.res.status, 200); + }); + + it('should throw error when object not exists', async () => { + await assert.rejects(async () => { + await ossObject.getStream(`${name}not-exists`); + }, (err: OSSClientError) => { + assert.equal(err.code, 'NoSuchKey'); + return true; + }); + }); + }); + + describe('getObjectMeta()', () => { + let name: string; + let resHeaders: IncomingHttpHeaders; + let fileSize: number; + before(async () => { + name = `${prefix}oss-client/oss/object-meta.js`; + const object = await ossObject.put(name, __filename); + fileSize = (await stat(__filename)).size; + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + resHeaders = object.res.headers; + }); + + after(async () => { + await 
ossObject.delete(name); + }); + + it('should head not exists object throw NoSuchKeyError', async () => { + await assert.rejects(async () => { + await ossObject.getObjectMeta(`${name}not-exists`); + }, (err: OSSClientError) => { + assert.equal(err.code, 'NoSuchKey'); + assert.equal(err.status, 404); + assert.equal(typeof err.requestId, 'string'); + return true; + }); + }); + + it('should return Etag and Content-Length', async () => { + const info = await ossObject.getObjectMeta(name); + assert.equal(info.status, 200); + assert.equal(info.res.headers.etag, resHeaders.etag); + assert.equal(info.res.headers['content-length'], fileSize.toString()); + // no versionId won't return this header + assert(!info.res.headers['x-oss-last-access-time']); + }); + }); + + describe('copy()', () => { + let name: string; + let resHeaders: IncomingHttpHeaders; + let otherBucket: string; + let otherBucketObject: string; + before(async () => { + name = `${prefix}oss-client/oss/copy-meta.js`; + const object = await ossObject.put(name, __filename, { + meta: { + uid: 1, + pid: '123', + slus: 'test-copy.html', + }, + }); + assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); + resHeaders = object.res.headers; + + // otherBucket = `oss-client-copy-source-bucket-${prefix.replace(/[/.]/g, '-')}`; + // otherBucket = otherBucket.substring(0, otherBucket.length - 1); + // await store.putBucket(otherBucket); + // store.useBucket(otherBucket); + // otherBucketObject = `${prefix}oss-client/oss/copy-source.js`; + // await store.put(otherBucketObject, __filename); + // store.useBucket(bucket); + }); + + after(async () => { + await ossObject.delete(name); + }); + + it('should copy object from same bucket', async () => { + const targetName = `${prefix}oss-client/oss/copy-new.js`; + const result = await ossObject.copy(targetName, name); + assert.equal(result.res.status, 200); + assert.equal(typeof result.data?.etag, 'string'); + assert.equal(typeof result.data?.lastModified, 'string'); + 
+ const info = await ossObject.head(targetName); + assert.equal(info.meta.uid, '1'); + assert.equal(info.meta.pid, '123'); + assert.equal(info.meta.slus, 'test-copy.html'); + assert.equal(info.status, 200); + assert.equal(info.res.headers.etag, resHeaders.etag); + }); + + it('should copy object from same bucket and set content-disposition', async () => { + const targetName = `${prefix}oss-client/oss/copy-content-disposition.js`; + const disposition = 'attachment; filename=test'; + const result = await ossObject.copy(targetName, name, { + headers: { + 'Content-Disposition': disposition, + }, + }); + assert.strictEqual(result.res.status, 200); + const { res } = await ossObject.get(targetName); + assert.strictEqual(res.headers['content-disposition'], disposition); + }); + + it.skip('should copy object from other bucket, sourceBucket in copySource', async () => { + const copySource = `/${otherBucket}/${otherBucketObject}`; + const copyTarget = `${prefix}oss-client/oss/copy-target.js`; + const result = await ossObject.copy(copyTarget, copySource); + assert.equal(result.res.status, 200); + + const info = await ossObject.head(copyTarget); + assert.equal(info.status, 200); + }); + + it.skip('should copy object from other bucket, sourceBucket is a separate parameter', async () => { + const copySource = otherBucketObject; + const copyTarget = `${prefix}oss-client/oss/has-bucket-name-copy-target.js`; + const result = await ossObject.copy(copyTarget, copySource, otherBucket); + assert.equal(result.res.status, 200); + + const info = await ossObject.head(copyTarget); + assert.equal(info.status, 200); + }); + + it('should copy object with non-english name', async () => { + const sourceName = `${prefix}oss-client/oss/copy-meta_测试.js`; + await ossObject.put(sourceName, __filename, { + meta: { + uid: 2, + pid: '1234', + slus: 'test1.html', + }, + }); + const targetName = `${prefix}oss-client/oss/copy-new_测试.js`; + const result = await ossObject.copy(targetName, sourceName); + 
assert.equal(result.res.status, 200); + assert.equal(typeof result.data?.etag, 'string'); + assert.equal(typeof result.data?.lastModified, 'string'); + + const info = await ossObject.head(targetName); + assert.equal(info.meta.uid, '2'); + assert.equal(info.meta.pid, '1234'); + assert.equal(info.meta.slus, 'test1.html'); + assert.equal(info.status, 200); + }); + + it.skip('should copy object with non-english name and bucket', async () => { + // let sourceName = `${prefix}oss-client/oss/copy-meta_测试2.js`; + // let result = await ossObject.put(sourceName, __filename, { + // meta: { + // uid: 3, + // pid: '12345', + // slus: 'test2.html', + // }, + // }); + + // let info = await ossObject.head(sourceName); + // assert.equal(info.meta.uid, '3'); + // assert.equal(info.meta.pid, '12345'); + // assert.equal(info.meta.slus, 'test2.html'); + // assert.equal(info.status, 200); + + // sourceName = `/${bucket}/${sourceName}`; + // const originname = `${prefix}oss-client/oss/copy-new_测试2.js`; + // result = await ossObject.copy(originname, sourceName); + // assert.equal(result.res.status, 200); + // assert.equal(typeof result.data.etag, 'string'); + // assert.equal(typeof result.data.lastModified, 'string'); + + // info = await ossObject.head(originname); + // assert.equal(info.meta.uid, '3'); + // assert.equal(info.meta.pid, '12345'); + // assert.equal(info.meta.slus, 'test2.html'); + // assert.equal(info.status, 200); + }); + + it('should copy object and set other meta', async () => { + const targetName = `${prefix}oss-client/oss/copy-new-2.js`; + const result = await ossObject.copy(targetName, name, { + meta: { + uid: '2', + }, + }); + assert.equal(result.res.status, 200); + assert.equal(typeof result.data?.etag, 'string'); + assert.equal(typeof result.data?.lastModified, 'string'); + + const info = await ossObject.head(targetName); + assert.equal(info.meta.uid, '2'); + assert(!info.meta.pid); + assert(!info.meta.slus); + assert.equal(info.status, 200); + }); + + it('should 
copy object with special characters such as ;,/?:@&=+$#', async () => { + const sourceName = `${prefix}oss-client/oss/copy-a;,/?:@&=+$#b.js`; + await ossObject.put(sourceName, Buffer.alloc(1024 * 1024)); + await ossObject.copy(`${prefix}oss-client/oss/copy-a.js`, sourceName); + await ossObject.copy(`${prefix}oss-client/oss/copy-a+b.js`, sourceName); + }); + + it('should use copy to change exists object headers', async () => { + const targetName = `${prefix}oss-client/oss/copy-new-3.js`; + let result = await ossObject.copy(targetName, name); + assert.equal(result.res.status, 200); + assert.equal(typeof result.data?.etag, 'string'); + assert.equal(typeof result.data?.lastModified, 'string'); + let info = await ossObject.head(targetName); + assert(!info.res.headers['cache-control']); + + // add Cache-Control header to a exists object + result = await ossObject.copy(targetName, targetName, { + headers: { + 'Cache-Control': 'max-age=0, s-maxage=86400', + }, + }); + assert.equal(result.res.status, 200); + assert.equal(typeof result.data?.etag, 'string'); + assert.equal(typeof result.data?.lastModified, 'string'); + info = await ossObject.head(targetName); + assert.equal(info.res.headers['cache-control'], 'max-age=0, s-maxage=86400'); + }); + + it('should throw NoSuchKeyError when source object not exists', async () => { + await assert.rejects(async () => { + await ossObject.copy('new-object', 'not-exists-object'); + }, (err: OSSClientError) => { + assert.equal(err.code, 'NoSuchKey'); + assert.match(err.message, /The specified key does not exist\./); + assert.equal(err.status, 404); + return true; + }); + }); + + describe('If-Match header', () => { + it('should throw PreconditionFailedError when If-Match not equal source object etag', async () => { + await assert.rejects(async () => { + await ossObject.copy('new-name', name, { + headers: { + 'If-Match': 'foo-bar', + }, + }); + }, (err: OSSClientError) => { + assert.equal(err.code, 'PreconditionFailed'); + assert.match( + 
err.message, + /At least one of the pre-conditions you specified did not hold. \(condition=If-Match\)/, + ); + assert.equal(err.status, 412); + return true; + }); + }); + + it('should copy object when If-Match equal source object etag', async () => { + const targetName = `${prefix}oss-client/oss/copy-new-If-Match.js`; + const result = await ossObject.copy(targetName, name, { + headers: { + 'If-Match': resHeaders.etag, + }, + }); + assert.equal(result.res.status, 200); + assert.equal(typeof result.data?.etag, 'string'); + assert.equal(typeof result.data?.lastModified, 'string'); + }); + }); + + describe('If-None-Match header', () => { + it('should return 304 when If-None-Match equal source object etag', async () => { + const result = await ossObject.copy('new-name', name, { + headers: { + 'If-None-Match': resHeaders.etag, + }, + }); + assert.equal(result.res.status, 304); + assert.equal(result.data, null); + }); + + it('should copy object when If-None-Match not equal source object etag', async () => { + const targetName = `${prefix}oss-client/oss/copy-new-If-None-Match.js`; + const result = await ossObject.copy(targetName, name, { + headers: { + 'If-None-Match': 'foo-bar', + }, + }); + assert.equal(result.res.status, 200); + assert.equal(typeof result.data?.etag, 'string'); + assert.equal(typeof result.data?.lastModified, 'string'); + }); + }); + + describe('If-Modified-Since header', () => { + it('should 304 when If-Modified-Since > source object modified time', async () => { + const targetName = `${prefix}oss-client/oss/copy-new-If-Modified-Since.js`; + const nextYear = new Date(resHeaders.date!); + nextYear.setFullYear(nextYear.getFullYear() + 1); + const result = await ossObject.copy(targetName, name, { + headers: { + 'If-Modified-Since': nextYear.toUTCString(), + }, + }); + assert.equal(result.res.status, 304); + }); + + it('should 304 when If-Modified-Since >= source object modified time', async () => { + const targetName = 
`${prefix}oss-client/oss/copy-new-If-Modified-Since.js`; + const result = await ossObject.copy(targetName, name, { + headers: { + 'If-Modified-Since': resHeaders.date, + }, + }); + assert.equal(result.res.status, 304); + }); + + it('should 200 when If-Modified-Since < source object modified time', async () => { + const targetName = `${prefix}oss-client/oss/copy-new-If-Modified-Since.js`; + const lastYear = new Date(resHeaders.date!); + lastYear.setFullYear(lastYear.getFullYear() - 1); + const result = await ossObject.copy(targetName, name, { + headers: { + 'If-Modified-Since': lastYear.toUTCString(), + }, + }); + assert.equal(result.res.status, 200); + }); + }); + + describe('If-Unmodified-Since header', () => { + it('should 200 when If-Unmodified-Since > source object modified time', async () => { + const targetName = `${prefix}oss-client/oss/copy-new-If-Unmodified-Since.js`; + const nextYear = new Date(resHeaders.date!); + nextYear.setFullYear(nextYear.getFullYear() + 1); + const result = await ossObject.copy(targetName, name, { + headers: { + 'If-Unmodified-Since': nextYear.toUTCString(), + }, + }); + assert.equal(result.res.status, 200); + }); + + it('should 200 when If-Unmodified-Since >= source object modified time', async () => { + const targetName = `${prefix}oss-client/oss/copy-new-If-Unmodified-Since.js`; + const result = await ossObject.copy(targetName, name, { + headers: { + 'If-Unmodified-Since': resHeaders.date, + }, + }); + assert.equal(result.res.status, 200); + }); + + it('should throw PreconditionFailedError when If-Unmodified-Since < source object modified time', async () => { + const targetName = `${prefix}oss-client/oss/copy-new-If-Unmodified-Since.js`; + const lastYear = new Date(resHeaders.date!); + lastYear.setFullYear(lastYear.getFullYear() - 1); + await assert.rejects(async () => { + await ossObject.copy(targetName, name, { + headers: { + 'If-Unmodified-Since': lastYear.toUTCString(), + }, + }); + }, (err: OSSClientError) => { + 
assert.equal(err.code, 'PreconditionFailed'); + assert.match( + err.message, + /At least one of the pre-conditions you specified did not hold. \(condition=If-Unmodified-Since\)/, + ); + assert.equal(err.status, 412); + return true; + }); + }); + }); + }); + + describe('putSymlink() and getSymlink()', () => { + it('Should put and get Symlink', async () => { + const targetName = `${prefix}oss-client/target-测试.js`; + const name = `${prefix}oss-client/symlink-软链接.js`; + await ossObject.put(targetName, __filename); + + const result = await ossObject.putSymlink(name, targetName, { + storageClass: 'IA', + meta: { + uid: '1', + slus: 'test.html', + }, + }); + assert.equal(result.res.status, 200); + + const getResult = await ossObject.getSymlink(name); + assert.equal(getResult.res.status, 200); + // console.log(getResult.res.headers); + assert.equal(getResult.targetName, targetName); + assert.deepEqual(getResult.meta, { + uid: '1', + slus: 'test.html', + }); + + const headResult = await ossObject.head(name); + assert.equal(headResult.res.status, 200); + assert.equal(headResult.res.headers['x-oss-object-type'], 'Symlink'); + assert.deepEqual(headResult.meta, { + uid: '1', + slus: 'test.html', + }); + }); + }); + + describe('getObjectTagging() putObjectTagging() deleteObjectTagging()', () => { + const name = `${prefix}oss-client/tagging-${Date.now()}.js`; + + before(async () => { + await ossObject.put(name, __filename); + }); + + after(async () => { + await ossObject.delete(name); + }); + + it('should get the tags of object', async () => { + const result = await ossObject.getObjectTagging(name); + assert.strictEqual(result.status, 200); + assert.deepEqual(result.tag, {}); + }); + + it('should configures or updates the tags of object', async () => { + const tag = { a: '1', b: '2', c: '' }; + let putResult = await ossObject.putObjectTagging(name, tag); + assert.strictEqual(putResult.status, 200); + + let getResult = await ossObject.getObjectTagging(name); + 
assert.strictEqual(getResult.status, 200); + assert.deepEqual(getResult.tag, tag); + + const tag2 = { a: '3' }; + putResult = await ossObject.putObjectTagging(name, tag2); + assert.strictEqual(putResult.status, 200); + + getResult = await ossObject.getObjectTagging(name); + assert.strictEqual(getResult.status, 200); + assert.deepEqual(getResult.tag, tag2); + }); + + it('maximum of 10 tags for a object', async () => { + await assert.rejects(async () => { + const tag: any = {}; + Array(11) + .fill(1) + .forEach((_, index) => { + tag[index] = index; + }); + await ossObject.putObjectTagging(name, tag); + }, (err: TypeError) => { + assert.strictEqual('maximum of 10 tags for a object', err.message); + return true; + }); + }); + + it('tag can contain invalid string', async () => { + await assert.rejects(async () => { + const errorStr = '错误字符串@#¥%……&*!'; + const key = errorStr; + const value = errorStr; + const tag = { [key]: value }; + await ossObject.putObjectTagging(name, tag); + }, (err: TypeError) => { + assert.strictEqual( + 'tag can contain letters, numbers, spaces, and the following symbols: plus sign (+), hyphen (-), equal sign (=), period (.), underscore (_), colon (:), and forward slash (/)', + err.message); + return true; + }); + }); + + it('tag key can be a maximum of 128 bytes in length', async () => { + await assert.rejects(async () => { + const key = new Array(129).fill('1').join(''); + const tag = { [key]: '1' }; + await ossObject.putObjectTagging(name, tag); + }, (err: TypeError) => { + assert.strictEqual('tag key can be a minimum of 1 byte and a maximum of 128 bytes in length', err.message); + return true; + }); + }); + + it('tag value can be a maximum of 256 bytes in length', async () => { + await assert.rejects(async () => { + const value = new Array(257).fill('1').join(''); + const tag = { a: value }; + await ossObject.putObjectTagging(name, tag); + }, (err: TypeError) => { + assert.strictEqual('tag value can be a maximum of 256 bytes in length', 
err.message); + return true; + }); + }); + + it('should throw error when the type of tag is not Object', async () => { + await assert.rejects(async () => { + const tag = [{ a: 1 }]; + await ossObject.putObjectTagging(name, tag as any); + }, (err: TypeError) => { + assert.equal(err.message, 'the key and value of the tag must be String'); + return true; + }); + }); + + it('should throw error when the type of tag value is number', async () => { + await assert.rejects(async () => { + const tag = { a: 1 }; + await ossObject.putObjectTagging(name, tag as any); + }, (err: TypeError) => { + assert.strictEqual('the key and value of the tag must be String', err.message); + return true; + }); + }); + + it('should throw error when the type of tag value is Object', async () => { + await assert.rejects(async () => { + const tag = { a: { inner: '1' } }; + await ossObject.putObjectTagging(name, tag as any); + }, (err: TypeError) => { + assert.strictEqual('the key and value of the tag must be String', err.message); + return true; + }); + }); + + it('should throw error when the type of tag value is Array', async () => { + await assert.rejects(async () => { + const tag = { a: [ '1', '2' ] }; + await ossObject.putObjectTagging(name, tag as any); + }, (err: TypeError) => { + assert.strictEqual('the key and value of the tag must be String', err.message); + return true; + }); + }); + + it('should delete the tags of object', async () => { + let result; + const tag = { a: '1', b: '2' }; + await ossObject.putObjectTagging(name, tag); + + result = await ossObject.deleteObjectTagging(name); + assert.strictEqual(result.status, 204); + + result = await ossObject.getObjectTagging(name); + assert.strictEqual(result.status, 200); + assert.deepEqual(result.tag, {}); + }); + }); + + describe('calculatePostSignature()', () => { + it('should get signature for postObject', async () => { + const name = 'calculatePostSignature.js'; + const url = ossObject.generateObjectUrl(name).replace(name, ''); + 
const date = new Date(); + date.setDate(date.getDate() + 1); + const policy = { + expiration: date.toISOString(), + conditions: [{ bucket: config.oss.bucket }], + }; + + const params = ossObject.calculatePostSignature(policy); + assert.equal(typeof params.policy, 'string'); + const result = await urllib.request(url, { + method: 'POST', + data: { + ...params, + key: name, + }, + files: { + file: createReadStream(__filename), + }, + }); + assert.equal(result.statusCode, 204); + const headRes = await ossObject.head(name); + assert.equal(headRes.status, 200); + }); + + it('should throw error when policy is not JSON or Object', async () => { + assert.throws(() => { + ossObject.calculatePostSignature('string'); + }, /Policy string is not a valid JSON/); + assert.throws(() => { + ossObject.calculatePostSignature(123 as any); + }, /policy must be JSON string or Object/); + }); + }); + + describe('generateObjectUrl()', () => { + it('should return object url', () => { + let name = 'test.js'; + let url = ossObject.generateObjectUrl(name); + assert(url); + + name = '/foo/bar/a%2Faa/test&+-123~!.js'; + url = ossObject.generateObjectUrl(name, 'https://foo.com'); + assert.equal(url, 'https://foo.com/foo/bar/a%252Faa/test%26%2B-123~!.js'); + const url2 = ossObject.generateObjectUrl(name, 'https://foo.com/'); + assert.equal(url2, 'https://foo.com/foo/bar/a%252Faa/test%26%2B-123~!.js'); + }); + }); + + describe('processObjectSave()', () => { + const name = `${prefix}oss-client/processObjectSave/sourceObject.png`; + const target = `${prefix}oss-client/processObjectSave/processObject_target${Date.now()}.jpg`; + before(async () => { + const imagePath = path.join(__dirname, 'nodejs-1024x768.png'); + await ossObject.put(name, imagePath); + }); + + after(async () => { + await ossObject.delete(name); + await ossObject.delete(target); + }); + + it('should process image', async () => { + const result = await ossObject.processObjectSave( + name, + target, + 
'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00,', + ); + assert.equal(result.res.status, 200); + assert.equal(result.status, 200); + }); + }); +}); diff --git a/test/bucket.test.js b/test/bucket.test.js deleted file mode 100644 index f77b5a3e5..000000000 --- a/test/bucket.test.js +++ /dev/null @@ -1,1492 +0,0 @@ -const assert = require('assert'); -const utils = require('./utils'); -const oss = require('..'); -const config = require('./config').oss; -const ms = require('humanize-ms'); -const { metaSyncTime, timeout } = require('./config'); - -describe.skip('test/bucket.test.js', () => { - const { prefix, includesConf } = utils; - let store; - let bucket; - let bucketRegion; - const defaultRegion = config.region; - before(async () => { - store = oss(config); - config.region = defaultRegion; - store = oss(config); - bucket = `oss-client-test-bucket-${prefix.replace(/[/.]/g, '-')}`; - bucket = bucket.substring(0, bucket.length - 1); - bucketRegion = defaultRegion; - - const result = await store.putBucket(bucket, { timeout }); - assert.equal(result.bucket, bucket); - assert.equal(result.res.status, 200); - }); - // restore object will have cache - after(async () => { - await utils.cleanBucket(store, bucket); - }); - - describe('setBucket()', () => { - it('should check bucket name', async () => { - try { - const name = 'oss-client-test-bucket-/'; - await store.setBucket(name); - throw new Error('should not run'); - } catch (err) { - assert(err.message === 'The bucket must be conform to the specifications'); - } - }); - }); - - describe('getBucket()', () => { - it('should get bucket name', async () => { - const name = 'oss-client-test-bucket'; - await store.setBucket(name); - const res = store.getBucket(); - assert.equal(res, name); - }); - }); - - describe('putBucket()', () => { - let name; - let archvieBucket; - before(async () => { - name = `oss-client-test-putbucket-${prefix.replace(/[/.]/g, '-')}`; - name = name.substring(0, name.length - 1); - // 
just for archive bucket test - archvieBucket = `oss-client-archive-bucket-${prefix.replace(/[/.]/g, '-')}`; - archvieBucket = archvieBucket.substring(0, archvieBucket.length - 1); - await store.putBucket(archvieBucket, { StorageClass: 'Archive', timeout }); - }); - - it('should create a new bucket', async () => { - const result1 = await store.putBucket(name, { timeout }); - assert.equal(result1.bucket, name); - assert.equal(result1.res.status, 200); - }); - - it('should create an archive bucket', async () => { - await utils.sleep(ms(metaSyncTime)); - const result2 = await store.listBuckets( - {}, - { - timeout, - } - ); - const { buckets } = result2; - const m = buckets.some(item => item.name === archvieBucket); - assert(m === true); - buckets.map(item => { - if (item.name === archvieBucket) { - assert(item.StorageClass === 'Archive'); - } - return 1; - }); - }); - - // todo resume - // it('should create an ZRS bucket', async () => { - // const ZRS_name = `oss-client-zrs-${prefix.replace(/[/.]/g, '-').slice(0, -1)}`; - // const ZRS_put_res = await store.putBucket(ZRS_name, { - // dataRedundancyType: 'ZRS' - // }); - // assert.strictEqual(ZRS_put_res.res.status, 200); - // const ZRS_get_res = await store.getBucketInfo(ZRS_name); - // assert.strictEqual(ZRS_get_res.bucket.DataRedundancyType, 'ZRS'); - // await store.deleteBucket(ZRS_name); - // }); - - it('should create an public-read bucket', async () => { - const public_read_name = `oss-client-zrs-${prefix.replace(/[/.]/g, '-').slice(0, -1)}`; - const public_read_name_res = await store.putBucket(public_read_name, { - acl: 'public-read', - }); - assert.strictEqual(public_read_name_res.res.status, 200); - const public_read_name_get_res = await store.getBucketInfo(public_read_name); - assert.strictEqual(public_read_name_get_res.bucket.AccessControlList.Grant, 'public-read'); - await store.deleteBucket(public_read_name); - }); - - after(async () => { - const result = await store.deleteBucket(name); - 
assert(result.res.status === 200 || result.res.status === 204); - await store.deleteBucket(archvieBucket); - }); - }); - - describe('getBucketInfo', () => { - it('it should return correct bucketInfo when bucket exist', async () => { - const result = await store.getBucketInfo(bucket); - assert.equal(result.res.status, 200); - - assert.equal(result.bucket.Location, `${bucketRegion}`); - assert.equal(result.bucket.ExtranetEndpoint, `${bucketRegion}.aliyuncs.com`); - assert.equal(result.bucket.IntranetEndpoint, `${bucketRegion}-internal.aliyuncs.com`); - assert.equal(result.bucket.AccessControlList.Grant, 'private'); - assert.equal(result.bucket.StorageClass, 'Standard'); - }); - - it('it should return NoSuchBucketError when bucket not exist', async () => { - await utils.throws(async () => { - await store.getBucketInfo('not-exists-bucket'); - }, 'NoSuchBucketError'); - }); - }); - - describe('getBucketLoaction', () => { - it('it should return loaction this.region', async () => { - const result = await store.getBucketLocation(bucket); - assert.equal(result.location, bucketRegion); - }); - - it('it should return NoSuchBucketError when bucket not exist', async () => { - await utils.throws(async () => { - await store.getBucketLocation('not-exists-bucket'); - }, 'NoSuchBucketError'); - }); - }); - - describe('deleteBucket()', () => { - it('should delete not exists bucket throw NoSuchBucketError', async () => { - await utils.throws(async () => { - await store.deleteBucket('not-exists-bucket'); - }, 'NoSuchBucketError'); - }); - - it('should delete not empty bucket throw BucketNotEmptyError', async () => { - store.useBucket(bucket); - await store.put('oss-client-test-bucket.txt', __filename); - utils.sleep(ms(metaSyncTime)); - await utils.throws(async () => { - await store.deleteBucket(bucket); - }, 'BucketNotEmptyError'); - await store.delete('oss-client-test-bucket.txt'); - }); - }); - - describe('putBucketACL()', () => { - it('should set bucket acl to public-read-write', 
async () => { - const resultAcl = await store.putBucketACL(bucket, 'public-read-write'); - assert.equal(resultAcl.res.status, 200); - assert.equal(resultAcl.bucket, bucket); - - // Need wait some time for bucket meta sync - await utils.sleep(ms(metaSyncTime)); - - const r = await store.getBucketACL(bucket); - assert.equal(r.res.status, 200); - // skip it, data will be delay - // assert.equal(r.acl, 'public-read-write'); - }); - - it('should create and set acl when bucket not exists', async () => { - const bucketacl = `${bucket}-new`; - const putresult = await store.putBucketACL(bucketacl, 'public-read'); - assert.equal(putresult.res.status, 200); - assert.equal(putresult.bucket, bucketacl); - - await utils.sleep(ms(metaSyncTime)); - - const getresult = await store.getBucketACL(bucketacl); - assert.equal(getresult.res.status, 200); - assert.equal(getresult.acl, 'public-read'); - - await store.deleteBucket(bucketacl); - }); - }); - - describe('listBuckets()', () => { - let listBucketsPrefix; - before(async () => { - // create 2 buckets - listBucketsPrefix = `oss-client-list-buckets-${prefix.replace(/[/.]/g, '-')}`; - await Promise.all( - Array(2) - .fill(1) - .map((v, i) => store.putBucket(listBucketsPrefix + i)) - ); - }); - - it('should list buckets by prefix', async () => { - const result = await store.listBuckets( - { - prefix: listBucketsPrefix, - 'max-keys': 20, - }, - { - timeout, - } - ); - - assert(Array.isArray(result.buckets)); - assert.equal(result.buckets.length, 2); - assert(!result.isTruncated); - assert.equal(result.nextMarker, null); - assert(result.owner); - assert.equal(typeof result.owner.id, 'string'); - assert.equal(typeof result.owner.displayName, 'string'); - - for (let i = 0; i < 2; i++) { - const name = listBucketsPrefix + i; - assert.equal(result.buckets[i].name, name); - } - }); - - it('should list buckets by subres', async () => { - const tag = { - a: '1', - b: '2', - }; - const putTagBukcet = `${listBucketsPrefix}0`; - await 
store.putBucketTags(putTagBukcet, tag); - const { buckets } = await store.listBuckets({ - prefix: listBucketsPrefix, - subres: { - tagging: Object.entries(tag) - .map(_ => _.map(inner => `"${inner.toString()}"`).join(':')) - .join(','), - }, - }); - - if (buckets && buckets.length && buckets[0]) { - assert.deepStrictEqual(buckets[0].tag, tag); - } else { - assert(false); - } - }); - - after(async () => { - await Promise.all( - Array(2) - .fill(1) - .map((v, i) => store.deleteBucket(listBucketsPrefix + i)) - ); - }); - }); - - describe('putBucketLogging(), getBucketLogging(), deleteBucketLogging()', () => { - it('should create, get and delete the logging', async () => { - let result = await store.putBucketLogging(bucket, 'logs/'); - assert.equal(result.res.status, 200); - // put again will be fine - result = await store.putBucketLogging(bucket, 'logs/'); - assert.equal(result.res.status, 200); - - // get the logging setttings - result = await store.getBucketLogging(bucket); - assert.equal(result.res.status, 200); - - // delete it - result = await store.deleteBucketLogging(bucket); - assert.equal(result.res.status, 204); - }); - }); - - describe('putBucketWebsite(), getBucketWebsite(), deleteBucketWebsite()', () => { - it('should get and delete the website settings', async () => { - await store.putBucketWebsite(bucket, { - index: 'index.html', - }); - - await utils.sleep(ms(metaSyncTime)); - - // get - const get = await store.getBucketWebsite(bucket); - assert.equal(typeof get.index, 'string'); - assert.equal(get.res.status, 200); - - // delete it - const del = await store.deleteBucketWebsite(bucket); - assert.equal(del.res.status, 204); - }); - - it('should create when RoutingRules is Array or Object', async () => { - const routingRule1 = { - RuleNumber: '1', - Condition: { - KeyPrefixEquals: 'abc/', - HttpErrorCodeReturnedEquals: '404', - }, - Redirect: { - RedirectType: 'Mirror', - MirrorUsingRole: 'false', - MirrorUserLastModified: 'false', - PassQueryString: 
'true', - MirrorIsExpressTunnel: 'false', - MirrorPassOriginalSlashes: 'false', - MirrorAllowHeadObject: 'false', - MirrorURL: 'http://www.test.com/', - MirrorPassQueryString: 'true', - MirrorFollowRedirect: 'true', - MirrorCheckMd5: 'true', - MirrorHeaders: { - PassAll: 'true', - Pass: [ 'myheader-key1', 'myheader-key2' ], - Remove: [ 'remove1', 'remove2' ], - Set: { - Key: 'myheader-key5', - Value: 'myheader-value5', - }, - }, - }, - }; - const routingRules = [ - { - RuleNumber: '2', - Condition: { - KeyPrefixEquals: 'a1bc/', - HttpErrorCodeReturnedEquals: '404', - }, - Redirect: { - RedirectType: 'Mirror', - MirrorUsingRole: 'false', - MirrorUserLastModified: 'false', - MirrorAllowHeadObject: 'false', - MirrorIsExpressTunnel: 'false', - MirrorPassOriginalSlashes: 'false', - PassQueryString: 'true', - MirrorURL: 'http://www.test1.com/', - MirrorPassQueryString: 'true', - MirrorFollowRedirect: 'true', - MirrorCheckMd5: 'true', - MirrorHeaders: { - PassAll: 'true', - Pass: [ 'myheader-key12', 'myheader-key22' ], - Remove: [ 'remove1', 'remove2' ], - Set: { - Key: 'myheader-key5', - Value: 'myheader-value5', - }, - }, - }, - }, - ]; - const website = { - index: 'index1.html', - supportSubDir: 'true', - type: '1', - error: 'error1.html', - routingRules, - }; - - const result1 = await store.putBucketWebsite(bucket, website); - assert.strictEqual(result1.res.status, 200); - const rules1 = await store.getBucketWebsite(bucket); - assert(includesConf(rules1.routingRules, routingRules)); - assert.strictEqual(rules1.supportSubDir, website.supportSubDir); - assert.strictEqual(rules1.type, website.type); - - website.routingRules = [ routingRule1 ]; - const result2 = await store.putBucketWebsite(bucket, website); - assert.strictEqual(result2.res.status, 200); - const rules2 = await store.getBucketWebsite(bucket); - assert(includesConf(rules2.routingRules, website.routingRules)); - }); - - it('should throw error when RoutingRules is not Array', async () => { - const website = { 
- index: 'index1.html', - supportSubDir: 'true', - type: '1', - error: 'error1.html', - routingRules: '', - }; - - try { - await store.putBucketWebsite(bucket, website); - assert(false); - } catch (error) { - assert.strictEqual(error.message, 'RoutingRules must be Array'); - } - try { - website.RoutingRules = 0; - await store.putBucketWebsite(bucket, website); - assert(false); - } catch (error) { - assert.strictEqual(error.message, 'RoutingRules must be Array'); - } - }); - }); - - describe('putBucketReferer(), getBucketReferer(), deleteBucketReferer()', () => { - it('should create, get and delete the referer', async () => { - const putresult = await store.putBucketReferer(bucket, true, [ 'http://npm.taobao.org' ], { timeout }); - assert.equal(putresult.res.status, 200); - - // put again will be fine - const referers = [ 'http://npm.taobao.org', 'https://npm.taobao.org', 'http://cnpmjs.org' ]; - const putReferer = await store.putBucketReferer(bucket, false, referers, { timeout }); - assert.equal(putReferer.res.status, 200); - - await utils.sleep(ms(metaSyncTime)); - - // get - const getReferer = await store.getBucketReferer(bucket); - assert(Array.isArray(getReferer.referers)); - assert.equal(typeof getReferer.allowEmpty, 'boolean'); - assert.equal(getReferer.res.status, 200); - - // delete it - const deleteResult = await store.deleteBucketReferer(bucket); - assert.equal(deleteResult.res.status, 200); - }); - }); - - describe('putBucketCORS(), getBucketCORS(), deleteBucketCORS()', () => { - afterEach(async () => { - // delete it - const result = await store.deleteBucketCORS(bucket, { timeout }); - assert.equal(result.res.status, 204); - }); - - it('should create, get and delete the cors', async () => { - const rules = [ - { - allowedOrigin: '*', - allowedMethod: 'GET', - allowedHeader: '*', - exposeHeader: 'Content-Length', - maxAgeSeconds: '30', - }, - ]; - const putResult = await store.putBucketCORS(bucket, rules); - assert.equal(putResult.res.status, 200); - - 
const getResult = await store.getBucketCORS(bucket, { timeout }); - assert.equal(getResult.res.status, 200); - assert.deepEqual(getResult.rules, [ - { - allowedOrigin: '*', - allowedMethod: 'GET', - allowedHeader: '*', - exposeHeader: 'Content-Length', - maxAgeSeconds: '30', - }, - ]); - }); - - it('should overwrite cors', async () => { - const rules1 = [ - { - allowedOrigin: '*', - allowedMethod: 'GET', - timeout, - }, - ]; - const putCorsResult1 = await store.putBucketCORS(bucket, rules1); - assert.equal(putCorsResult1.res.status, 200); - - await utils.sleep(ms(metaSyncTime)); - - const getCorsResult1 = await store.getBucketCORS(bucket, { timeout }); - assert.equal(getCorsResult1.res.status, 200); - assert.deepEqual(getCorsResult1.rules, [ - { - allowedOrigin: '*', - allowedMethod: 'GET', - }, - ]); - - const rules2 = [ - { - allowedOrigin: 'localhost', - allowedMethod: 'HEAD', - }, - ]; - const putCorsResult2 = await store.putBucketCORS(bucket, rules2); - assert.equal(putCorsResult2.res.status, 200); - - await utils.sleep(ms(metaSyncTime)); - - const getCorsResult2 = await store.getBucketCORS(bucket, { timeout }); - assert.equal(getCorsResult2.res.status, 200); - assert.deepEqual(getCorsResult2.rules, [ - { - allowedOrigin: 'localhost', - allowedMethod: 'HEAD', - }, - ]); - }); - - it('should check rules', async () => { - try { - await store.putBucketCORS(bucket); - throw new Error('should not run'); - } catch (err) { - assert(err.message === 'rules is required'); - } - }); - - it('should check allowedOrigin', async () => { - try { - await store.putBucketCORS(bucket, [{}]); - throw new Error('should not run'); - } catch (err) { - assert(err.message === 'allowedOrigin is required'); - } - }); - - it('should check allowedMethod', async () => { - try { - const rules = [ - { - allowedOrigin: '*', - }, - ]; - await store.putBucketCORS(bucket, rules); - throw new Error('should not run'); - } catch (err) { - assert(err.message === 'allowedMethod is required'); - } - 
}); - - it('should throw error when rules not exist', async () => { - try { - await store.getBucketCORS(bucket); - throw new Error('should not run'); - } catch (err) { - assert(err.message === 'The CORS Configuration does not exist.'); - } - }); - }); - - describe('putBucketRequestPayment(), getBucketRequestPayment()', () => { - it('should create, get the request payment', async () => { - try { - await store.putBucketRequestPayment(bucket, 'Requester'); - const result = await store.getBucketRequestPayment(bucket); - assert(result.payer === 'Requester', 'payer should be Requester'); - } catch (err) { - assert(false); - } - }); - - it('should throw error when payer is not BucketOwner or Requester', async () => { - try { - await store.putBucketRequestPayment(bucket, 'requester'); - } catch (err) { - assert(err.message.includes('payer must be BucketOwner or Requester')); - } - }); - }); - - describe('getBucketTags() putBucketTags() deleteBucketTags()', () => { - it('should get the tags of bucket', async () => { - try { - const result = await store.getBucketTags(bucket); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, {}); - } catch (error) { - assert(false, error); - } - }); - - it('should configures or updates the tags of bucket', async () => { - let result; - try { - const tag = { a: '1', b: '2' }; - result = await store.putBucketTags(bucket, tag); - assert.strictEqual(result.status, 200); - - result = await store.getBucketTags(bucket); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, tag); - } catch (error) { - assert(false, error); - } - - try { - const tag = { a: '1' }; - result = await store.putBucketTags(bucket, tag); - assert.strictEqual(result.status, 200); - - result = await store.getBucketTags(bucket); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, tag); - } catch (error) { - assert(false, error); - } - }); - - it('maximum of 20 tags for a bucket', async () => { - try { - const tag = 
{}; - Array(21) - .fill(1) - .forEach((_, index) => { - tag[index] = index; - }); - await store.putBucketTags(bucket, tag); - } catch (error) { - assert.strictEqual('maximum of 20 tags for a bucket', error.message); - } - }); - - it('tag key can be a maximum of 64 bytes in length', async () => { - try { - const key = new Array(65).fill('1').join(''); - const tag = { [key]: '1' }; - - await store.putBucketTags(bucket, tag); - } catch (error) { - assert.strictEqual('tag key can be a maximum of 64 bytes in length', error.message); - } - }); - - it('tag value can be a maximum of 128 bytes in length', async () => { - try { - const value = new Array(129).fill('1').join(''); - const tag = { a: value }; - - await store.putBucketTags(bucket, tag); - } catch (error) { - assert.strictEqual('tag value can be a maximum of 128 bytes in length', error.message); - } - }); - - it('should throw error when the type of tag is not Object', async () => { - try { - const tag = [{ a: 1 }]; - await store.putBucketTags(bucket, tag); - } catch (error) { - assert(error.message.includes('tag must be Object')); - } - }); - - it('should throw error when the type of tag value is number', async () => { - try { - const tag = { a: 1 }; - await store.putBucketTags(bucket, tag); - } catch (error) { - assert.strictEqual('the key and value of the tag must be String', error.message); - } - }); - - it('should throw error when the type of tag value is Object', async () => { - try { - const tag = { a: { inner: '1' } }; - await store.putBucketTags(bucket, tag); - } catch (error) { - assert.strictEqual('the key and value of the tag must be String', error.message); - } - }); - - it('should throw error when the type of tag value is Array', async () => { - try { - const tag = { a: [ '1', '2' ] }; - await store.putBucketTags(bucket, tag); - } catch (error) { - assert.strictEqual('the key and value of the tag must be String', error.message); - } - }); - - it('should delete the tags of bucket', async () => { - let 
result; - try { - const tag = { a: '1', b: '2' }; - await store.putBucketTags(bucket, tag); - - result = await store.deleteBucketTags(bucket); - assert.strictEqual(result.status, 204); - - result = await store.getBucketTags(bucket); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, {}); - } catch (error) { - assert(false, error); - } - }); - }); - - describe('putBucketEncryption(), getBucketEncryption(), deleteBucketEncryption()', () => { - it('should create, get and delete the bucket encryption', async () => { - // put with AES256 - const putresult1 = await store.putBucketEncryption(bucket, { - SSEAlgorithm: 'AES256', - }); - assert.equal(putresult1.res.status, 200); - // put again with KMS will be fine - // const putresult2 = await store.putBucketEncryption(bucket, { - // SSEAlgorithm: 'KMS', - // KMSMasterKeyID: '1b2c3132-b2ce-4ba3-a4dd-9885904099ad' - // }); - // assert.equal(putresult2.res.status, 200); - // await utils.sleep(ms(metaSyncTime)); - // get - const getBucketEncryption = await store.getBucketEncryption(bucket); - assert.equal(getBucketEncryption.res.status, 200); - assert.deepEqual(getBucketEncryption.encryption, { - SSEAlgorithm: 'AES256', - // KMSMasterKeyID: '1b2c3132-b2ce-4ba3-a4dd-9885904099ad' - }); - // delete - const deleteResult = await store.deleteBucketEncryption(bucket); - assert.equal(deleteResult.res.status, 204); - }); - }); - - describe('putBucketLifecycle()', () => { - // todo delete - it('should put the lifecycle with old api', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [ - { - id: 'expiration1', - prefix: 'logs/', - status: 'Enabled', - days: 1, - }, - ]); - assert.equal(putresult1.res.status, 200); - - const putresult2 = await store.putBucketLifecycle(bucket, [ - { - id: 'expiration2', - prefix: 'logs/', - status: 'Enabled', - date: '2020-02-18T00:00:00.000Z', - }, - ]); - assert.equal(putresult2.res.status, 200); - }); - - it('should put the lifecycle with expiration and 
id', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [ - { - id: 'expiration1', - prefix: 'logs/', - status: 'Enabled', - expiration: { - days: 1, - }, - }, - ]); - assert.equal(putresult1.res.status, 200); - - const getBucketLifecycle = await store.getBucketLifecycle(bucket); - assert(getBucketLifecycle.rules.length > 0 && getBucketLifecycle.rules.find(v => v.id === 'expiration1')); - - const putresult2 = await store.putBucketLifecycle(bucket, [ - { - id: 'expiration2', - prefix: 'logs/', - status: 'Enabled', - expiration: { - createdBeforeDate: '2020-02-18T00:00:00.000Z', - }, - }, - ]); - assert.equal(putresult2.res.status, 200); - }); - - it('should put the lifecycle with AbortMultipartUpload', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload1', - prefix: 'logs/', - status: 'Enabled', - abortMultipartUpload: { - days: 1, - }, - }, - ]); - assert.equal(putresult1.res.status, 200); - - const putresult2 = await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload2', - prefix: 'logs/', - status: 'Enabled', - abortMultipartUpload: { - createdBeforeDate: '2020-02-18T00:00:00.000Z', - }, - }, - ]); - assert.equal(putresult2.res.status, 200); - }); - - it('should put the lifecycle with empty prefix (whole bucket)', async () => { - const putresult = await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload1', - prefix: '', // empty prefix (whole bucket) - status: 'Enabled', - abortMultipartUpload: { - days: 1, - }, - }, - ]); - assert.equal(putresult.res.status, 200); - }); - - it('should put the lifecycle with Transition', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [ - { - id: 'transition', - prefix: 'logs/', - status: 'Enabled', - transition: { - createdBeforeDate: '2020-02-18T00:00:00.000Z', - storageClass: 'Archive', - }, - expiration: { - createdBeforeDate: '2020-02-17T00:00:00.000Z', - }, - tag: { - key: 'test', - value: 
'123', - }, - }, - ]); - assert.equal(putresult1.res.status, 200); - - const putresult2 = await store.putBucketLifecycle(bucket, [ - { - id: 'transition', - prefix: 'logs/', - status: 'Enabled', - transition: { - days: 20, - storageClass: 'Archive', - }, - tag: { - key: 'test', - value: '123', - }, - }, - ]); - assert.equal(putresult2.res.status, 200); - }); - - it('should put the lifecycle with expiration and Tag', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [ - { - id: 'tag1', - prefix: 'logs/', - status: 'Enabled', - expiration: { - days: 1, - }, - tag: { - key: 1, - value: '2', - }, - }, - ]); - assert.equal(putresult1.res.status, 200); - - const putresult2 = await store.putBucketLifecycle(bucket, [ - { - id: 'tag2', - prefix: 'logs/', - status: 'Enabled', - expiration: { - createdBeforeDate: '2020-02-18T00:00:00.000Z', - }, - tag: { - key: 1, - value: '2', - }, - }, - ]); - assert.equal(putresult2.res.status, 200); - - const putresult3 = await store.putBucketLifecycle(bucket, [ - { - id: 'tag2', - prefix: 'logs/', - status: 'Enabled', - expiration: { - createdBeforeDate: '2020-02-18T00:00:00.000Z', - }, - tag: [ - { - key: 1, - value: '2', - }, - { - key: 'testkey', - value: 'testvalue', - }, - ], - }, - ]); - assert.equal(putresult3.res.status, 200); - }); - - it('should throw error when id more than 255 bytes ', async () => { - const testID = Array(256).fill('a').join(''); - try { - await store.putBucketLifecycle(bucket, [ - { - id: testID, - prefix: 'testid/', - status: 'Enabled', - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('255')); - } - }); - - it('should throw error when no prefix', async () => { - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'prefix', - status: 'Enabled', - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('prefix')); - } - }); - - it('should throw error when status is not Enabled or Disabled', async () => { - try { - await 
store.putBucketLifecycle(bucket, [ - { - id: 'status', - prefix: 'fix/', - status: 'test', - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('Enabled or Disabled')); - } - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'status', - prefix: 'fix/', - status: '', - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('Enabled or Disabled')); - } - }); - - it('should throw error when storageClass is not Archive or IA', async () => { - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'storageClass', - prefix: 'fix/', - status: 'Enabled', - transition: { - createdBeforeDate: '2020-02-18T00:00:00.000Z', - storageClass: 'test', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('IA or Archive')); - } - }); - - it('should throw error when transition must have days or createdBeforeDate', async () => { - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'storageClass', - prefix: 'fix/', - status: 'Enabled', - transition: { - storageClass: 'Archive', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('days or createdBeforeDate')); - } - }); - - it('should throw error when days of transition is not a positive integer', async () => { - const errorMessage = 'a positive integer'; - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'transition', - prefix: 'fix/', - status: 'Enabled', - transition: { - days: 1.1, - storageClass: 'Archive', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'transition', - prefix: 'fix/', - status: 'Enabled', - transition: { - days: 'asd', - storageClass: 'Archive', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - }); - - it('should throw error when createdBeforeDate of transition is not iso8601 format', async () => { 
- const errorMessage = 'iso8601'; - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'transition', - prefix: 'fix/', - status: 'Enabled', - transition: { - createdBeforeDate: new Date().toISOString(), // eg: YYYY-MM-DDT00:00:00.000Z - storageClass: 'Archive', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'transition', - prefix: 'fix/', - status: 'Enabled', - transition: { - createdBeforeDate: new Date().toString(), - storageClass: 'Archive', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - }); - - it('should throw error when abortMultipartUpload must have days or createdBeforeDate', async () => { - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'storageClass', - prefix: 'fix/', - status: 'Enabled', - abortMultipartUpload: {}, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('days or createdBeforeDate')); - } - }); - - it('should throw error when days of abortMultipartUpload is not a positive integer', async () => { - const errorMessage = 'a positive integer'; - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload', - prefix: 'fix/', - status: 'Enabled', - abortMultipartUpload: { - days: 1.1, - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload', - prefix: 'fix/', - status: 'Enabled', - abortMultipartUpload: { - days: 'a', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - }); - - it('should throw error when createdBeforeDate of abortMultipartUpload is not iso8601 format', async () => { - const errorMessage = 'iso8601'; - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload', - prefix: 'fix/', 
- status: 'Enabled', - abortMultipartUpload: { - createdBeforeDate: new Date().toISOString(), // eg: YYYY-MM-DDT00:00:00.000Z - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload', - prefix: 'fix/', - status: 'Enabled', - abortMultipartUpload: { - createdBeforeDate: new Date().toString(), // eg: YYYY-MM-DDT00:00:00.000Z - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - }); - - it('should throw error when rule have no expiration or abortMultipartUpload', async () => { - const errorMessage = 'expiration or abortMultipartUpload'; - try { - await store.putBucketLifecycle(bucket, [ - { - prefix: 'expirationAndAbortMultipartUpload/', - status: 'Enabled', - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - }); - - it('should throw error when tag is used with abortMultipartUpload', async () => { - const errorMessage = 'Tag cannot be used with abortMultipartUpload'; - try { - await store.putBucketLifecycle(bucket, [ - { - prefix: 'expirationAndAbortMultipartUpload/', - status: 'Enabled', - abortMultipartUpload: { - days: 1, - }, - expiration: { - days: 1, - }, - tag: { - value: '1', - key: 'test', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - }); - }); - - describe('getBucketLifecycle()', () => { - it('should get the lifecycle', async () => { - const putresult = await store.putBucketLifecycle(bucket, [ - { - id: 'get_test', - prefix: 'logs/', - status: 'Enabled', - expiration: { - days: 1, - }, - tag: [ - { - key: 'test', - value: '1', - }, - { - key: 'test1', - value: '2', - }, - ], - }, - ]); - assert.equal(putresult.res.status, 200); - - const getBucketLifecycle = await store.getBucketLifecycle(bucket); - assert(getBucketLifecycle.rules.length > 0); - 
assert.equal(getBucketLifecycle.res.status, 200); - }); - }); - - describe('deleteBucketLifecycle()', () => { - it('should delete the lifecycle', async () => { - const putresult = await store.putBucketLifecycle(bucket, [ - { - id: 'delete', - prefix: 'logs/', - status: 'Enabled', - expiration: { - days: 1, - }, - tag: [ - { - key: 'test', - value: '1', - }, - { - key: 'test1', - value: '2', - }, - ], - }, - ]); - assert.equal(putresult.res.status, 200); - - // delete it - const deleteResult = await store.deleteBucketLifecycle(bucket); - assert.equal(deleteResult.res.status, 204); - }); - }); - - describe('getBucketPolicy() putBucketPolicy() deleteBucketPolicy()', () => { - it('should put, get, delete, when policy is Object', async () => { - try { - const policy = { - Version: '1', - Statement: [ - { - Action: [ 'oss:PutObject', 'oss:GetObject' ], - Effect: 'Deny', - Principal: [ '1234567890' ], - Resource: [ 'acs:oss:*:1234567890:*/*' ], - }, - ], - }; - const result = await store.putBucketPolicy(bucket, policy); - assert.strictEqual(result.status, 200); - const result1 = await store.getBucketPolicy(bucket); - assert.deepStrictEqual(policy, result1.policy); - const result2 = await store.deleteBucketPolicy(bucket); - assert.strictEqual(result2.status, 204); - const result3 = await store.getBucketPolicy(bucket); - assert.deepStrictEqual(null, result3.policy); - } catch (err) { - assert(false, err.message); - } - }); - it('should throw error, when policy is not Object', async () => { - try { - await store.putBucketPolicy(bucket, 'policy'); - assert(false); - } catch (err) { - assert(true); - } - }); - }); - describe('inventory()', () => { - const inventory = { - id: 'default', - isEnabled: false, - prefix: 'ttt', - OSSBucketDestination: { - format: 'CSV', - accountId: '1817184078010220', - rolename: 'AliyunOSSRole', - bucket, - prefix: 'test', - }, - frequency: 'Daily', - includedObjectVersions: 'All', - optionalFields: { - field: [ 'Size', 'LastModifiedDate' ], - }, 
- }; - - describe('putBucketInventory', () => { - before(() => { - inventory.OSSBucketDestination.bucket = bucket; - }); - it('should put bucket inventory', async () => { - try { - await store.putBucketInventory(bucket, inventory); - } catch (err) { - assert(false, err); - } - }); - it('should return inventory array when inventory is one config', async () => { - const inventoryRes = await store.listBucketInventory(bucket); - assert(Array.isArray(inventoryRes.inventoryList)); - assert(inventoryRes.inventoryList.length === 1); - assert.strictEqual(inventoryRes.status, 200); - }); - it('should put bucket inventory when no optionalFields or no field', async () => { - try { - inventory.id = 'test_optionalFields'; - delete inventory.optionalFields; - await store.putBucketInventory(bucket, inventory); - - inventory.id = 'test_field'; - inventory.optionalFields = {}; - await store.putBucketInventory(bucket, inventory); - - inventory.id = 'test_field_is_one'; - inventory.optionalFields = { - field: [ 'Size' ], - }; - await store.putBucketInventory(bucket, inventory); - assert(true); - } catch (err) { - assert(false, err); - } - }); - it('should put bucket inventory when no prefix', async () => { - try { - inventory.id = 'test_prefix'; - delete inventory.prefix; - await store.putBucketInventory(bucket, inventory); - assert(true); - } catch (err) { - assert(false, err); - } - }); - it('should put bucket inventory when no OSSBucketDestination prefix', async () => { - try { - inventory.id = 'test_OSSBucketDestination_prefix'; - delete inventory.OSSBucketDestination.prefix; - await store.putBucketInventory(bucket, inventory); - assert(true); - } catch (err) { - assert(false, err); - } - }); - it('should put bucket inventory when has encryption', async () => { - try { - inventory.id = 'test_encryption_SSE-OSS'; - inventory.OSSBucketDestination.encryption = { 'SSE-OSS': '' }; - await store.putBucketInventory(bucket, inventory); - assert(true); - } catch (err) { - assert(false, 
err); - } - }); - }); - describe('getBucketInventory', () => { - let testGetInventory; - it('should get bucket inventory by inventoryId', async () => { - try { - const result = await store.getBucketInventory(bucket, inventory.id); - testGetInventory = result.inventory; - assert(includesConf(testGetInventory, inventory)); - } catch (err) { - assert(false); - } - }); - it('should return Field array when Field value is one length Array', async () => { - try { - assert( - testGetInventory.optionalFields && - testGetInventory.optionalFields.field && - Array.isArray(testGetInventory.optionalFields.field) && - testGetInventory.optionalFields.field.length === 1 - ); - } catch (err) { - assert(false); - } - }); - }); - describe('listBucketInventory', () => { - before(async () => { - let _index = 0; - async function putInventoryList() { - await Promise.all( - new Array(1).fill(1).map(() => { - _index++; - return store.putBucketInventory(bucket, Object.assign({}, inventory, { id: `test_list_${_index}` })); - }) - ); - } - - await putInventoryList(); - }); - it('should list bucket inventory', async () => { - const inventoryRes = await store.listBucketInventory(bucket); - assert.strictEqual(inventoryRes.status, 200); - }); - }); - describe('deleteBucketInventory', () => { - it('should delete bukcet inventory', async () => { - let inventoryList = []; - let isTruncated; - let continuationToken; - do { - const inventoryRes = await store.listBucketInventory(bucket, { continuationToken }); - inventoryList = [ ...inventoryList, ...inventoryRes.inventoryList ]; - isTruncated = inventoryRes.isTruncated; - continuationToken = inventoryRes.nextContinuationToken; - } while (isTruncated); - try { - // avoid Qps limit - do { - const list = inventoryList.splice(0, 10); - // eslint-disable-next-line no-loop-func - await Promise.all(list.map(_ => store.deleteBucketInventory(bucket, _.id))); - utils.sleep(400); - } while (inventoryList.length); - assert(true); - } catch (err) { - assert(false, 
err); - } - }); - }); - - describe('bucket response status code', async () => { - it('success getBucketInfo, status code should be 200', async () => { - const result = await store.getBucketInfo(bucket); - assert.equal(result.res.status, 200); - }); - it('no equivalent bucket ,status code should be 404', async () => { - try { - await store.getBucketInfo('adasdasdxcvmxvnxvmdfsdfsdf'); - } catch (err) { - assert.equal(err.status, 404); - } - }); - it('bucket name already exists,status code should be 409', async () => { - try { - await store.putBucket(bucket); - } catch (err) { - assert.equal(err.status, 409); - } - }); - }); - describe('getBucketStat', () => { - it('should get bucket stat', async () => { - const result = await store.getBucketStat(bucket); - assert.equal(typeof result.stat, 'object'); - assert.equal(result.res.status, 200); - }); - }); - }); -}); diff --git a/test/bucket_worm.test.js b/test/bucket_worm.test.js deleted file mode 100644 index 8c5829a3c..000000000 --- a/test/bucket_worm.test.js +++ /dev/null @@ -1,80 +0,0 @@ -const assert = require('assert'); -const utils = require('./utils'); -const oss = require('..'); -const config = require('./config').oss; -const timeout = require('./config').timeout; - -describe.skip('test/bucket_worm.test.js', () => { - const { prefix } = utils; - let store; - let bucket; - const defaultRegion = config.region; - before(async () => { - store = oss(config); - config.region = defaultRegion; - store = oss(config); - bucket = `oss-client-test-worm-bucket-worm-${prefix.replace(/[/.]/g, '-')}`; - bucket = bucket.substring(0, bucket.length - 1); - - const result = await store.putBucket(bucket, { timeout }); - assert.equal(result.bucket, bucket); - assert.equal(result.res.status, 200); - }); - // github CI will remove buckets - // restore object will have cache - // after(async () => { - // await utils.cleanBucket(store, bucket); - // }); - describe('worm()', () => { - describe('initiateBucketWorm()', () => { - it('should 
init bucket worm', async () => { - try { - await store.initiateBucketWorm(bucket, '1'); - assert(true); - } catch (error) { - assert(false, error); - } - }); - }); - describe('abortBucketWorm()', () => { - it('should abort bucket worm', async () => { - try { - await store.abortBucketWorm(bucket); - assert(true); - } catch (error) { - assert(false, error); - } - }); - }); - describe('completeBucketWorm(), getBucketWorm()', () => { - it('should complete bucket worm', async () => { - const { wormId } = await store.initiateBucketWorm(bucket, '1'); - try { - await store.completeBucketWorm(bucket, wormId); - assert(true); - } catch (error) { - assert(false, error); - } - - try { - const result = await store.getBucketWorm(bucket); - assert(result.wormId); - } catch (error) { - assert(false, error); - } - }); - }); - describe('extendBucketWorm()', () => { - it('should extend bucket worm', async () => { - try { - const { wormId, days } = await store.getBucketWorm(bucket); - await store.extendBucketWorm(bucket, wormId, (days * 1 + 1).toString()); - const result = await store.getBucketWorm(bucket); - assert(result.days - days === 1); - } catch (error) { - assert(false, error); - } - }); - }); - }); -}); diff --git a/test/bukcet_worm.test.js b/test/bukcet_worm.test.js deleted file mode 100644 index 96bfd5235..000000000 --- a/test/bukcet_worm.test.js +++ /dev/null @@ -1,87 +0,0 @@ -const assert = require('assert'); -const utils = require('./utils'); -const oss = require('..'); -const config = require('./config').oss; -const { timeout } = require('./config'); - -describe.skip('test/bucket.test.js', () => { - const { prefix } = utils; - let store; - let bucket; - const defaultRegion = config.region; - before(async () => { - store = oss(config); - config.region = defaultRegion; - store = oss(config); - bucket = `oss-client-test-worm2-bucket-${prefix.replace(/[/.]/g, '-')}`; - bucket = bucket.substring(0, bucket.length - 1); - - const result = await store.putBucket(bucket, { 
timeout }); - assert.equal(result.bucket, bucket); - assert.equal(result.res.status, 200); - }); - - // github CI will remove buckets - // restore object will have cache - // after(async () => { - // await utils.cleanBucket(store, bucket); - // }); - - describe('worm()', () => { - describe('initiateBucketWorm()', () => { - it('should init bucket worm', async () => { - try { - await store.initiateBucketWorm(bucket, '1'); - assert(true); - } catch (error) { - assert(false, error); - } - }); - }); - describe('abortBucketWorm()', () => { - it('should abort bucket worm', async () => { - try { - await store.abortBucketWorm(bucket); - assert(true); - } catch (error) { - assert(false, error); - } - }); - }); - describe('completeBucketWorm(), getBucketWorm()', () => { - it('should complete bucket worm', async () => { - const { wormId } = await store.initiateBucketWorm(bucket, '1'); - try { - await store.completeBucketWorm(bucket, wormId); - assert(true); - } catch (error) { - assert(false, error); - } - - try { - const result = await store.getBucketWorm(bucket); - assert(result.wormId); - } catch (error) { - assert(false, error); - } - }); - }); - describe('extendBucketWorm()', () => { - it('should extend bucket worm', async () => { - try { - const { wormId, days } = await store.getBucketWorm(bucket); - await store.extendBucketWorm( - bucket, - wormId, - (days * 1 + 1).toString() - ); - const result = await store.getBucketWorm(bucket); - assert(result.days - days === 1); - } catch (error) { - assert(false, error); - } - }); - }); - }); - -}); diff --git a/test/client.test.js b/test/client.test.js deleted file mode 100644 index f8d1d101c..000000000 --- a/test/client.test.js +++ /dev/null @@ -1,396 +0,0 @@ -const assert = require('assert'); -const { Client } = require('..'); -const config = require('./config').oss; - -describe('test/client.test.js', () => { - it('init stsTokenFreshTime', () => { - const store = new Client(config); - const now = new Date(); - if 
(!store.stsTokenFreshTime) { - throw new Error('not init stsTokenFreshTime'); - } - assert(true, +now <= +store.stsTokenFreshTime); - }); - - it('should init with region', () => { - let store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-hangzhou', - }); - - assert.equal( - store.options.endpoint.format(), - 'http://oss-cn-hangzhou.aliyuncs.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-hangzhou', - internal: true, - }); - - assert.equal( - store.options.endpoint.format(), - 'http://oss-cn-hangzhou-internal.aliyuncs.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-hangzhou', - internal: true, - secure: true, - }); - - assert.equal( - store.options.endpoint.format(), - 'https://oss-cn-hangzhou-internal.aliyuncs.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'vpc100-oss-cn-beijing', - }); - - assert.equal( - store.options.endpoint.format(), - 'http://vpc100-oss-cn-beijing.aliyuncs.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'vpc100-oss-cn-shenzhen', - internal: true, - }); - - assert.equal( - store.options.endpoint.format(), - 'http://vpc100-oss-cn-shenzhen.aliyuncs.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'vpc100-oss-cn-hangzhou', - internal: true, - secure: true, - }); - - assert.equal( - store.options.endpoint.format(), - 'https://vpc100-oss-cn-hangzhou.aliyuncs.com/' - ); - }); - - it('should init with cname: foo.bar.com', () => { - let store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'foo.bar.com', - cname: true, - }); - - assert.equal( - store.options.endpoint.format(), - 'http://foo.bar.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'http://foo.bar.com', - cname: true, - }); - - assert.equal( 
- store.options.endpoint.format(), - 'http://foo.bar.com/' - ); - }); - - it('should init with endpoint: http://test.oss.com', () => { - let store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'test.oss.com', - }); - - assert.equal( - store.options.endpoint.format(), - 'http://test.oss.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'http://test.oss.com', - }); - - assert.equal( - store.options.endpoint.format(), - 'http://test.oss.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - secure: true, - endpoint: 'test.oss.com', - }); - - assert.equal( - store.options.endpoint.format(), - 'https://test.oss.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'https://test.oss.com', - }); - - assert.equal( - store.options.endpoint.format(), - 'https://test.oss.com/' - ); - }); - - it('should init with ip address: http://127.0.0.1', () => { - const store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: '127.0.0.1', - }); - - assert.equal( - store.options.endpoint.format(), - 'http://127.0.0.1/' - ); - }); - - it('should create request url with bucket', () => { - let store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-hangzhou', - }); - - let params = { - bucket: 'gems', - }; - - let url = store._getReqUrl(params); - assert.equal(url, 'http://gems.oss-cn-hangzhou.aliyuncs.com/'); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'test.oss.com', - }); - - params = { - bucket: 'gems', - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://gems.test.oss.com/'); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'foo.bar.com', - cname: true, - }); - - params = { - bucket: 'gems', - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://foo.bar.com/'); - - store = new 
Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'http://127.0.0.1:6000', - }); - - params = { - bucket: 'gems', - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://127.0.0.1:6000/'); - }); - - it('should create request url with bucket/object/subres', () => { - let store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-hangzhou', - }); - - let params = { - bucket: 'gems', - object: 'hello', - }; - - let url = store._getReqUrl(params); - assert.equal(url, 'http://gems.oss-cn-hangzhou.aliyuncs.com/hello'); - - params = { - bucket: 'gems', - object: 'hello', - subres: { acl: '', mime: '' }, - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://gems.oss-cn-hangzhou.aliyuncs.com/hello?acl=&mime='); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'test.oss.com', - }); - - params = { - bucket: 'gems', - object: 'hello', - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://gems.test.oss.com/hello'); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'foo.bar.com', - cname: true, - }); - - params = { - bucket: 'gems', - object: 'hello', - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://foo.bar.com/hello'); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'http://127.0.0.1:3000', - }); - - params = { - bucket: 'gems', - object: 'hello', - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://127.0.0.1:3000/hello'); - }); - - it('should set User-Agent', async () => { - const store = new Client(config); - store.useBucket(config.bucket); - const result = await store.getBucketInfo(); - assert.equal(result.res.status, 200); - assert(result.bucket.Name === config.bucket); - }); - - it('should trim access id/key', () => { - const store = new Client({ - accessKeyId: ' \tfoo\t\n ', - accessKeySecret: ' \tbar\n\r ', - region: 'oss-cn-hangzhou', - }); 
- - assert.equal(store.options.accessKeyId, 'foo'); - assert.equal(store.options.accessKeySecret, 'bar'); - }); - - describe('checkConfigValid', () => { - it('should success when endpoint is invalid', () => { - const checkConfig = { - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'vpc100-oss-cn-hangzhou', - internal: true, - secure: true, - }; - try { - new Client(checkConfig); - } catch (error) { - assert(false); - } - }); - it('should throw when endpoint includes invalid character', () => { - const checkConfig = { - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'vpc100-oss-cn-hangzhou', - internal: true, - secure: true, - }; - try { - new Client(checkConfig); - assert(false); - } catch (error) { - assert(error.message.includes('endpoint')); - } - }); - it('should throw when endpoint change to invalid character', async () => { - const checkConfig = { - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'vpc100-oss-cn-hangzhou', - internal: true, - secure: true, - }; - try { - const store = new Client(checkConfig); - const invalidHost = 'vpc100-oss-cn-hangzhou.《》.com'; - store.options.endpoint.host = invalidHost; - store.options.endpoint.hostname = invalidHost; - await store.listBuckets(); - assert(false); - } catch (error) { - assert(error.message.includes('endpoint')); - } - }); - it('should success when region is valid', () => { - const checkConfig = { - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-hangzhou', - internal: true, - secure: true, - }; - try { - new Client(checkConfig); - } catch (error) { - assert(false); - } - }); - it('should throw when region includes invalid character', () => { - const checkConfig = { - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-?hangzhou', - internal: true, - secure: true, - }; - try { - new Client(checkConfig); - assert(false); - } catch (error) { - assert(error.message.includes('region')); - } - }); - }); -}); diff --git a/test/cluster.test.js 
b/test/cluster.test.js deleted file mode 100644 index 5956a4715..000000000 --- a/test/cluster.test.js +++ /dev/null @@ -1,531 +0,0 @@ -const oss = require('..'); -const cluster = require('..').ClusterClient; -const config = require('./config').oss; -const utils = require('./utils'); -const assert = require('assert'); -const mm = require('mm'); - -describe.skip('test/cluster.test.js', () => { - const { prefix } = utils; - afterEach(mm.restore); - - before(async function() { - this.region = config.region; - this.bucket1 = `oss-client-test-cluster1-${prefix.replace(/[/.]/g, '')}`; - this.bucket2 = `oss-client-test-cluster2-${prefix.replace(/[/.]/g, '')}`; - const client = oss(config); - await client.putBucket(this.bucket1); - await client.putBucket(this.bucket2); - }); - - before(function(done) { - const options = { - cluster: [ - { - accessKeyId: config.accessKeyId, - accessKeySecret: config.accessKeySecret, - bucket: this.bucket1, - endpoint: config.endpoint, - }, - { - accessKeyId: config.accessKeyId, - accessKeySecret: config.accessKeySecret, - bucket: this.bucket2, - endpoint: config.endpoint, - }, - ], - }; - this.store = cluster(options); - this.store.on('error', err => { - if (err.name === 'MockError' || err.name === 'CheckAvailableError') { - return; - } - console.error(err.stack); - }); - this.store.ready(done); - }); - - after(async function() { - await utils.cleanBucket(this.store.clients[0], this.bucket1); - await utils.cleanBucket(this.store.clients[1], this.bucket2); - this.store.close(); - }); - - describe('init', () => { - it('require options.cluster to be an array', () => { - (function() { - cluster({}); - }).should.throw('require options.cluster to be an array'); - }); - - it('should _init() _checkAvailable throw error', function(done) { - this.store.once('error', err => { - err.message.should.equal('mock error'); - done(); - }); - mm.error(this.store, '_checkAvailable', 'mock error'); - this.store._init(); - }); - - it('should skip put status file 
when ignoreStatusFile is set', async function() { - mm.error(this.store, 'put', 'mock error'); - await this.store._checkAvailable(true); - }); - }); - - describe('put()', () => { - it('should add object with local file path', async function() { - const name = `${prefix}oss-client/oss/put-localfile.js`; - const object = await this.store.put(name, __filename); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - assert.equal(object.res.size, 0); - assert(object.name, name); - }); - - it('should error when any one is error', async function() { - mm.error(this.store.clients[1], 'put', 'mock error'); - const name = `${prefix}oss-client/oss/put-localfile.js`; - try { - await this.store.put(name, __filename); - throw new Error('should never exec'); - } catch (err) { - err.message.should.equal('mock error'); - } - }); - - it('should ignore when any one is error', async function() { - mm.error(this.store.clients[1], 'put', 'mock error'); - const name = `${prefix}oss-client/oss/put-localfile.js`; - try { - await this.store.put(name, __filename); - throw new Error('should never exec'); - } catch (err) { - err.message.should.equal('mock error'); - } - }); - }); - - describe('putACL() and getACL()', () => { - it('should add object with local file path', async function() { - const name = `${prefix}oss-client/oss/put-localfile.js`; - const object = await this.store.put(name, __filename); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - assert.equal(object.res.size, 0); - assert(object.name, name); - - let res = await this.store.getACL(name); - assert.equal(res.acl, 'default'); - - await this.store.putACL(name, 'public-read'); - res = await this.store.getACL(name); - assert.equal(res.acl, 'public-read'); - }); - }); - - describe('get()', () => { - before(async function() { - this.name = `${prefix}oss-client/oss/get-meta.js`; - const object 
= await this.store.put(this.name, __filename, { - meta: { - uid: 1, - pid: '123', - slus: 'test.html', - }, - }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - this.headers = object.res.headers; - }); - - it('should RR get from clients ok', async function() { - mm(this.store.clients[1], 'get', async () => { - throw new Error('mock error'); - }); - function onerror(err) { - throw err; - } - this.store.on('error', onerror); - - let res = await this.store.get(this.name); - res.res.status.should.equal(200); - mm.restore(); - mm(this.store.clients[0], 'get', async () => { - throw new Error('mock error'); - }); - res = await this.store.get(this.name); - res.res.status.should.equal(200); - - this.store.removeListener('error', onerror); - }); - - it('should RR get from clients[1] when clients[0] not available', async function() { - this.store.index = 0; - mm(this.store.availables, '0', false); - mm.data(this.store.clients[0], 'get', 'foo'); - let r = await this.store.get(this.name); - r.res.status.should.equal(200); - this.store.index.should.equal(0); - - // again should work - r = await this.store.get(this.name); - r.res.status.should.equal(200); - this.store.index.should.equal(0); - }); - - it('should RR get from clients[1] when clients[0] error ok', async function() { - this.store.index = 0; - mm.error(this.store.clients[0], 'get', 'mock error'); - let r = await this.store.get(this.name); - r.res.status.should.equal(200); - this.store.index.should.equal(1); - - // again should work - r = await this.store.get(this.name); - r.res.status.should.equal(200); - this.store.index.should.equal(0); - }); - - it('should RR get from clients[0] when clients[1] not available', async function() { - this.store.index = 0; - mm(this.store.availables, '1', false); - mm.data(this.store.clients[1], 'get', 'foo'); - let r = await this.store.get(this.name); - r.res.status.should.equal(200); - this.store.index.should.equal(1); - - // again should work - r = await 
this.store.get(this.name); - r.res.status.should.equal(200); - this.store.index.should.equal(1); - }); - - it('should RR get from clients[0] when clients[1] error ok', async function() { - this.store.index = 0; - mm.error(this.store.clients[1], 'get', 'mock error'); - let r = await this.store.get(this.name); - r.res.status.should.equal(200); - this.store.index.should.equal(1); - - // again should work - r = await this.store.get(this.name); - r.res.status.should.equal(200); - this.store.index.should.equal(0); - }); - - it('should MS always get from clients[0] ok', async function() { - mm(this.store, 'schedule', 'masterSlave'); - mm(this.store.clients[1], 'get', 'mock error'); - function onerror() { - throw new Error('should not emit error event'); - } - this.store.on('error', onerror); - - let res = await this.store.get(this.name); - res.res.status.should.equal(200); - res = await this.store.get(this.name); - res.res.status.should.equal(200); - - this.store.removeListener('error', onerror); - }); - - it('should MS always get from clients[0] when masterOnly === true', async function() { - mm(this.store, 'schedule', 'masterSlave'); - mm(this.store, 'masterOnly', true); - mm(this.store.clients[1], 'get', 'mock error'); - function onerror() { - throw new Error('should not emit error event'); - } - this.store.on('error', onerror); - - let res = await this.store.get(this.name); - res.res.status.should.equal(200); - res = await this.store.get(this.name); - res.res.status.should.equal(200); - - this.store.removeListener('error', onerror); - }); - - it('should get from clients[0] when clients[0] response 4xx ok', async function() { - mm(this.store, 'schedule', 'masterSlave'); - mm.error(this.store.clients[0], 'get', 'mock error', { status: 403 }); - try { - await this.store.get(this.name); - throw new Error('should never exec'); - } catch (err) { - err.status.should.equal(403); - } - }); - - it('should RR error when clients all down', async function() { - 
mm.error(this.store.clients[0], 'get', 'mock error'); - mm.error(this.store.clients[1], 'get', 'mock error'); - try { - await this.store.get(this.name); - throw new Error('should never exec'); - } catch (err) { - err.name.should.equal('MockError'); - err.message.should.equal('mock error (all clients are down)'); - } - }); - - it('should MS error when clients all down', async function() { - mm(this.store, 'schedule', 'masterSlave'); - mm.error(this.store.clients[0], 'get', 'mock error'); - mm.error(this.store.clients[1], 'get', 'mock error'); - try { - await this.store.get(this.name); - throw new Error('should never exec'); - } catch (err) { - err.name.should.equal('MockError'); - err.message.should.equal('mock error (all clients are down)'); - } - }); - - it('should RR throw error when read err status >= 200 && < 500', async function() { - mm(this.store.clients[0], 'get', async () => { - const err = new Error('mock error'); - throw err; - }); - mm(this.store.clients[1], 'get', async () => { - const err = new Error('mock 302 error'); - err.status = 302; - throw err; - }); - - this.store.index = 0; - try { - await this.store.get(this.name); - throw new Error('should not run this'); - } catch (err) { - err.status.should.equal(302); - } - - mm(this.store.clients[0], 'get', async () => { - const err = new Error('mock 404 error'); - err.status = 404; - throw err; - }); - mm(this.store.clients[1], 'get', async () => { - const err = new Error('mock error'); - throw err; - }); - this.store.index = 1; - try { - await this.store.get(this.name); - throw new Error('should not run this'); - } catch (err) { - err.status.should.equal(404); - } - }); - - it('should RR use the first client when all server down', async function() { - mm(this.store.availables, '0', false); - mm(this.store.availables, '1', false); - - this.store.index = 0; - await this.store.get(this.name); - - this.store.index = 1; - await this.store.get(this.name); - }); - }); - - describe('signatureUrl(), 
getObjectUrl()', () => { - before(async function() { - this.name = `${prefix}oss-client/oss/get-meta.js`; - const object = await this.store.put(this.name, __filename, { - meta: { - uid: 1, - pid: '123', - slus: 'test.html', - }, - }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - this.headers = object.res.headers; - }); - - it('should get object cdn url', function() { - const url = this.store.getObjectUrl(this.name); - assert(/\.aliyuncs\.com\//.test(url), url); - assert(/\/ali-sdk\/oss\/get-meta\.js$/.test(url), url); - - const cdnurl = this.store.getObjectUrl(this.name, 'https://foo.com'); - assert(/^https:\/\/foo\.com\//.test(cdnurl), cdnurl); - assert(/\/ali-sdk\/oss\/get-meta\.js$/.test(cdnurl), cdnurl); - }); - - it('should RR signatureUrl from clients ok', function() { - mm(this.store.clients[1], 'head', 'mock error'); - let url = this.store.signatureUrl(this.name); - url.should.match(/ali-sdk\/oss\/get-meta\.js/); - mm.restore(); - mm(this.store.clients[0], 'head', 'mock error'); - url = this.store.signatureUrl(this.name); - url.should.match(/ali-sdk\/oss\/get-meta\.js/); - }); - - it('should RR signature from clients[1] when clients[0] error ok', function() { - const url = this.store.signatureUrl(this.name); - url.should.match(/ali-sdk\/oss\/get-meta\.js/); - }); - - it('should MS always signature from clients[0] ok', function() { - mm(this.store, 'schedule', 'masterSlave'); - mm(this.store.clients[1], 'head', 'mock error'); - let url = this.store.signatureUrl(this.name); - url.should.match(/ali-sdk\/oss\/get-meta\.js/); - url = this.store.signatureUrl(this.name); - url.should.match(/ali-sdk\/oss\/get-meta\.js/); - }); - - it('should signature from clients[0] when clients[0] response 4xx ok', function() { - mm(this.store, 'schedule', 'masterSlave'); - mm.error(this.store.clients[0], 'head', 'mock error', { status: 403 }); - const url = this.store.signatureUrl(this.name); - url.should.match(/ali-sdk\/oss\/get-meta\.js/); - }); - 
- it('should signature ok when clients all down', function() { - mm.error(this.store.clients[0], 'head', 'mock error'); - mm.error(this.store.clients[1], 'head', 'mock error'); - const url = this.store.signatureUrl(this.name); - url.should.match(/ali-sdk\/oss\/get-meta\.js/); - }); - - it('should RR use the first client when all server down', function() { - mm(this.store.availables, '0', false); - mm(this.store.availables, '1', false); - - this.store.index = 0; - let url = this.store.signatureUrl(this.name); - url.should.match(/ali-sdk\/oss\/get-meta\.js/); - - this.store.index = 1; - url = this.store.signatureUrl(this.name); - url.should.match(/ali-sdk\/oss\/get-meta\.js/); - }); - - it('should masterSlave use the first client when all server down', function() { - mm(this.store, 'schedule', 'masterSlave'); - mm(this.store.availables, '0', false); - mm(this.store.availables, '1', false); - - this.store.index = 0; - let url = this.store.signatureUrl(this.name); - url.should.match(/ali-sdk\/oss\/get-meta\.js/); - - this.store.index = 1; - url = this.store.signatureUrl(this.name); - url.should.match(/ali-sdk\/oss\/get-meta\.js/); - }); - }); - - describe('_checkAvailable()', () => { - it('should write status file on the first check', async function() { - await this.store._checkAvailable(true); - this.store.availables['0'].should.equal(true); - this.store.availables['1'].should.equal(true); - }); - - it('should write status pass', async function() { - await this.store._checkAvailable(); - this.store.availables['0'].should.equal(true); - this.store.availables['1'].should.equal(true); - }); - - it('should available on err status 404', async function() { - mm(this.store.clients[0], 'head', async () => { - const err = new Error('mock 404 error'); - err.status = 404; - throw err; - }); - - mm(this.store.clients[1], 'head', async () => { - const err = new Error('mock 300 error'); - err.status = 300; - throw err; - }); - await this.store._checkAvailable(); - 
this.store.availables['0'].should.equal(true); - this.store.availables['1'].should.equal(true); - }); - - it('should not available on err status < 200 or >= 500', async function() { - mm(this.store.clients[0], 'head', async () => { - const err = new Error('mock -1 error'); - err.status = -1; - throw err; - }); - - mm(this.store.clients[1], 'head', async () => { - const err = new Error('mock 500 error'); - err.status = 500; - throw err; - }); - await this.store._checkAvailable(); - this.store.availables['0'].should.equal(false); - this.store.availables['1'].should.equal(false); - }); - - it('should available on error count < 3', async function() { - // client[0] error 2 times - let count = 0; - mm(this.store.clients[0], 'head', async name => { - count++; - if (count === 3) { - return { name }; - } - throw new Error('mock error'); - }); - await this.store._checkAvailable(); - this.store.availables['0'].should.equal(true); - this.store.availables['1'].should.equal(true); - count.should.equal(3); - mm.restore(); - - // client[1] error 1 times - count = 0; - mm(this.store.clients[1], 'head', async name => { - count++; - if (count === 2) { - return { name }; - } - throw new Error('mock error'); - }); - await this.store._checkAvailable(); - this.store.availables['0'].should.equal(true); - this.store.availables['1'].should.equal(true); - count.should.equal(2); - }); - - it('should try 3 times on check status fail', async function() { - // client[0] error - mm.error(this.store.clients[0], 'head', 'mock error'); - await this.store._checkAvailable(); - this.store.availables['0'].should.equal(false); - this.store.availables['1'].should.equal(true); - mm.restore(); - - // client[1] error - mm.error(this.store.clients[1], 'head', 'mock error'); - await this.store._checkAvailable(); - this.store.availables['0'].should.equal(true); - this.store.availables['1'].should.equal(false); - mm.restore(); - - // all down - mm.error(this.store.clients[0], 'head', 'mock error'); - 
mm.error(this.store.clients[1], 'head', 'mock error'); - await this.store._checkAvailable(); - this.store.availables['0'].should.equal(false); - this.store.availables['1'].should.equal(false); - mm.restore(); - - // recover - await this.store._checkAvailable(); - this.store.availables['0'].should.equal(true); - this.store.availables['1'].should.equal(true); - }); - }); -}); diff --git a/test/config.js b/test/config.js deleted file mode 100644 index 481d8254e..000000000 --- a/test/config.js +++ /dev/null @@ -1,22 +0,0 @@ -const { env } = process; - -const config = module.exports; - -config.oss = { - accessKeyId: env.ALI_SDK_OSS_ID, - accessKeySecret: env.ALI_SDK_OSS_SECRET, - region: env.ALI_SDK_OSS_REGION, - endpoint: env.ALI_SDK_OSS_ENDPOINT, - bucket: env.ALI_SDK_OSS_BUCKET, -}; - -config.sts = { - accessKeyId: env.ALI_SDK_STS_ID, - accessKeySecret: env.ALI_SDK_STS_SECRET, - roleArn: env.ALI_SDK_STS_ROLE, - bucket: env.ALI_SDK_STS_BUCKET, - endpoint: env.ALI_SDK_STS_ENDPOINT, -}; - -config.metaSyncTime = env.CI ? '30s' : '1000ms'; -config.timeout = '120s'; diff --git a/test/config.ts b/test/config.ts new file mode 100644 index 000000000..04abb8005 --- /dev/null +++ b/test/config.ts @@ -0,0 +1,11 @@ +export default { + prefix: `${process.platform}-${process.version}-${new Date().getTime()}/`, + oss: { + accessKeyId: process.env.OSS_CLIENT_ID! || process.env.ALI_SDK_OSS_ID!, + accessKeySecret: process.env.OSS_CLIENT_SECRET! || process.env.ALI_SDK_OSS_SECRET!, + region: process.env.OSS_CLIENT_REGION || process.env.ALI_SDK_OSS_REGION, + endpoint: process.env.OSS_CLIENT_ENDPOINT! || process.env.ALI_SDK_OSS_ENDPOINT!, + bucket: process.env.OSS_CLIENT_BUCKET! 
|| process.env.ALI_SDK_OSS_BUCKET!, + }, + timeout: '120s', +}; diff --git a/test/dataFix.test.js b/test/dataFix.test.js deleted file mode 100644 index cd1d7921a..000000000 --- a/test/dataFix.test.js +++ /dev/null @@ -1,225 +0,0 @@ -const assert = require('assert'); -const { dataFix } = require('../lib/common/utils/dataFix'); -const { sleep } = require('./utils'); - -describe('dataFix()', () => { - before(async () => { - await sleep(1000); - }); - describe('data is not object', () => { - it('should return without handle', () => { - const data = 'string'; - - const conf = { - remove: [ 'rm', 'rm2' ], - }; - dataFix(data, conf); - }); - }); - - describe('remove : array - remove unwanted props', () => { - it('should remove what is not needed', () => { - const data = { - rmNot: 'do NOT remove me', - rm: [], - rm2: 'what ever value dos NOT matter', - }; - - const conf = { - remove: [ 'rm', 'rm2' ], - }; - - dataFix(data, conf); - - assert(!conf.remove.find(_ => Object.prototype.hasOwnProperty.call(data, _))); - assert(Object.prototype.hasOwnProperty.call(data, 'rmNot')); - }); - }); - - describe('lowerFirst : boolean - turn key into first-letter-lower-case', () => { - const One = 'One'; - const Another = 'Another'; - const Both = 'Both'; - const both = 'both'; - const data = { - One, - Another, - Both, - both, - }; - - dataFix(data, { - lowerFirst: true, - }); - - it('should covert and remove the Old', () => { - assert(!data.One); - assert(!data.Another); - assert(data.one); - assert(data.another); - }); - - it('should not covert if lower-case will replace existed', () => { - assert.strictEqual(Both, data.Both); - assert.strictEqual(both, data.both); - }); - }); - - describe('bool : array - turn values into boolean if can be converted', () => { - const cannotConvertNumber2 = 2; - const cannotConvertOtherString = 'cannot convert'; - const data = { - trueB: true, - trueL: 'true', - trueU: 'TRUE', - true1: '1', - true1N: 1, - falseB: false, - falseL: 'false', - falseU: 
'FALSE', - false0: '0', - false0N: 0, - falseNull: null, - cannotConvertNumber2, - cannotConvertOtherString, - }; - - dataFix(data, { - bool: [ - 'trueB', - 'trueL', - 'trueU', - 'true1', - 'true1N', - 'falseB', - 'falseL', - 'falseU', - 'false0', - 'false0N', - 'falseNull', - 'cannotConvertNumber2', - 'cannotConvertOtherString', - 'nonExist', - ], - }); - - it('should boolean true/false remain boolean', () => { - assert.strictEqual(data.trueB, true); - assert.strictEqual(data.falseB, false); - }); - - it('should convert true TURE 1 (number or string) to boolean true', () => { - assert.strictEqual(data.trueL, true); - assert.strictEqual(data.trueU, true); - assert.strictEqual(data.true1, true); - assert.strictEqual(data.true1N, true); - }); - - it('should convert false FALSE 0 (number or string) to boolean false', () => { - assert.strictEqual(data.falseL, false); - assert.strictEqual(data.falseU, false); - assert.strictEqual(data.false0, false); - assert.strictEqual(data.false0N, false); - }); - - it('should convert null / undefined to false', () => { - assert.strictEqual(data.falseNull, false); - assert.strictEqual(data.nonExist, false); - }); - - it('should leave those cannot be converted as is', () => { - assert.strictEqual(cannotConvertNumber2, data.cannotConvertNumber2); - assert.strictEqual( - cannotConvertOtherString, - data.cannotConvertOtherString - ); - }); - }); - - describe('rename : object - rename bad prop keys into better names', () => { - const existValue = 123456; - const renameToAlready = 'rename to already'; - const alreadyExist = 'already'; - const data = { - existValue, - renameToAlready, - alreadyExist, - }; - - dataFix(data, { - rename: { - existValue: 'existValueRenamed', - nonExistValue: 'nonExistValueRenamed', - renameToAlready: 'alreadyExist', - }, - }); - - it('should replace existed values with new name and same value', () => { - assert(!data.existValue); - assert.strictEqual(data.existValueRenamed, existValue); - }); - - it('should not 
add prop when the prop-to-be-renamed does NOT exist', () => { - assert(!data.nonExistValueRenamed); - assert(!data.nonExistValue); - }); - - it('should not rename if a name already exist', () => { - assert.strictEqual(data.alreadyExist, alreadyExist); - assert.strictEqual(data.renameToAlready, renameToAlready); - }); - }); - - describe('camel : array - turn key into camel string', () => { - const Both = 'Both'; - const both = 'bothBoth'; - const data = { - One: 'One', - 'Another-another': 'Another-another', - 'Both-both': Both, - bothBoth: both, - }; - - dataFix(data, { - camel: [ ...Object.keys(data), 'noExistkey' ], - }); - - it('should covert and remove the Old', () => { - assert(data.one); - assert(data.anotherAnother); - }); - - it('should not covert if camel will replace existed', () => { - assert.strictEqual(Both, data['Both-both']); - assert.strictEqual(both, data.bothBoth); - }); - - it('should not covert if camel origin key is not exist', () => { - // eslint-disable-next-line no-prototype-builtins - assert(!data.hasOwnProperty('NoExistkey')); - }); - }); - - describe('finalKill: function', () => { - it('should correct fix data', () => { - const data = { - test: 1, - test1: 2, - needDelete: 'del', - needDelete1: 'del', - }; - - const delKey = 'needDelete'; - const addKey = 'addKey'; - dataFix(data, {}, o => { - Object.keys(o).forEach(_ => { - if (_.includes(delKey)) delete o[_]; - }); - o[addKey] = addKey; - }); - - assert(!Object.keys(data).find(_ => _.includes(delKey))); - assert.strictEqual(data.addKey, addKey); - }); - }); -}); diff --git a/test/endpointIsIP.test.js b/test/endpointIsIP.test.js deleted file mode 100644 index f38407296..000000000 --- a/test/endpointIsIP.test.js +++ /dev/null @@ -1,33 +0,0 @@ -const dns = require('dns').promises; -const assert = require('assert'); -const utils = require('./utils'); -const oss = require('../lib/client'); -const config = require('./config').oss; - -describe.skip('test/endpointIsIP.test.js', () => { - const 
{ prefix } = utils; - let store; - before(async () => { - store = oss(config); - const bucket = config.bucket; - const endpoint = await dns.lookup(`${bucket}.${store.options.endpoint.hostname}`); - console.log(`${bucket}.${store.options.endpoint.hostname}`); - console.log(endpoint); - const testEndponitConfig = Object.assign({}, config, { - cname: true, - endpoint: endpoint.address, - }); - store = oss(testEndponitConfig); - store.useBucket(bucket); - }); - - describe('endpoint is ip', () => { - it('should put and get', async () => { - const name = `${prefix}oss-client/oss/putWidhIP.js`; - const object = await store.put(name, __filename); - assert.equal(object.name, name); - const result = await store.get(name); - assert(result.res.status, 200); - }); - }); -}); diff --git a/test/multipart.test.js b/test/multipart.test.js deleted file mode 100644 index 47c6a4f57..000000000 --- a/test/multipart.test.js +++ /dev/null @@ -1,869 +0,0 @@ -const fs = require('fs'); -const assert = require('assert'); -const { md5 } = require('utility'); -const mm = require('mm'); -const sinon = require('sinon'); -const utils = require('./utils'); -const oss = require('..'); -const config = require('./config').oss; - -describe('test/multipart.test.js', () => { - // only run on v18 - if (!process.version.startsWith('v18.')) return; - - const { prefix } = utils; - const bucket = config.bucket; - let store; - before(async () => { - store = oss(config); - store.useBucket(bucket); - }); - - describe('listUploads()', () => { - beforeEach(async () => { - const result = await store.listUploads({ - 'max-uploads': 1000, - }); - const uploads = result.uploads || []; - await Promise.all(uploads.map(_ => store.abortMultipartUpload(_.name, _.uploadId))); - }); - - it('should list by key marker', async () => { - const name = `${prefix}multipart/list-key`; - const ids = ( - await Promise.all( - Array(5) - .fill(1) - .map((v, i) => store.initMultipartUpload(name + i)) - ) - ).map(_ => _.uploadId); - // 
list all uploads - let result = await store.listUploads({ - 'max-uploads': 10, - }); - const all = result.uploads.map(up => up.uploadId); - assert.deepEqual(all, ids); - - // after 1 - result = await store.listUploads({ - 'max-uploads': 10, - 'key-marker': name + 0, - }); - const after1 = result.uploads.map(up => up.uploadId); - assert.deepEqual(after1, ids.slice(1)); - - // after 5 - result = await store.listUploads({ - 'max-uploads': 10, - 'key-marker': name + 4, - }); - const after5 = result.uploads.map(up => up.uploadId); - assert.deepEqual(after5.length, 0); - }); - - it('should list by id marker', async () => { - const name = `${prefix}multipart/list-id`; - const ids = ( - await Promise.all( - Array(5) - .fill(1) - .map(_ => store.initMultipartUpload(name)) - ) - ) - .map(_ => _.uploadId) - .sort(); - - // list all uploads - let result = await store.listUploads({ - 'max-uploads': 10, - }); - const all = result.uploads.map(up => up.uploadId); - assert.deepEqual(all, ids); - - // after 1: upload id marker alone is ignored - result = await store.listUploads({ - 'max-uploads': 10, - 'upload-id-marker': ids[1], - }); - const after1 = result.uploads.map(up => up.uploadId); - assert.deepEqual(after1, ids); - - // after 5: upload id marker alone is ignored - result = await store.listUploads({ - 'max-uploads': 10, - 'upload-id-marker': ids[4], - }); - const after5 = result.uploads.map(up => up.uploadId); - assert.deepEqual(after5, ids); - }); - - it('should list by id & key marker', async () => { - const fooName = `${prefix}multipart/list-foo`; - const fooIds = ( - await Promise.all( - Array(5) - .fill(1) - .map(_ => store.initMultipartUpload(fooName)) - ) - ) - .map(_ => _.uploadId) - .sort(); - - const barName = `${prefix}multipart/list-bar`; - const barIds = ( - await Promise.all( - Array(5) - .fill(5) - .map(_ => store.initMultipartUpload(barName)) - ) - ) - .map(_ => _.uploadId) - .sort(); - - // after 1 - let result = await store.listUploads({ - 'max-uploads': 
10, - 'key-marker': barName, - 'upload-id-marker': barIds[0], - }); - const after1 = result.uploads.map(up => up.uploadId); - after1.sort(); - const sort1 = barIds.slice(1).concat(fooIds).sort(); - assert.deepEqual(after1, sort1); - - // after 5 - result = await store.listUploads({ - 'max-uploads': 10, - 'key-marker': barName, - 'upload-id-marker': barIds[4], - }); - const after5 = result.uploads.map(up => up.uploadId); - assert.deepEqual(after5, fooIds); - }); - }); - - describe('multipartUpload()', () => { - afterEach(mm.restore); - - it('should initMultipartUpload with x-oss-server-side-encryption', async () => { - const name = 'multipart-x-oss-server-side-encryption'; - const result = await store.initMultipartUpload(name, { - headers: { - 'x-oss-server-side-encryption': 'AES256', - }, - }); - - assert.equal(result.res.headers['x-oss-server-side-encryption'], 'AES256'); - }); - - it('should multipartUpload with x-oss-server-side-encryption', async () => { - const name = 'multipart-x-oss-server-side-encryption'; - const fileName = await utils.createTempFile('multipart-fallback', 1003 * 1020); - const result = await store.multipartUpload(name, fileName, { - headers: { - 'x-oss-server-side-encryption': 'KMS', - }, - }); - assert.equal(result.res.headers['x-oss-server-side-encryption'], 'KMS'); - }); - - it('should fallback to putStream when file size is smaller than 100KB', async () => { - const fileName = await utils.createTempFile('multipart-fallback', 100 * 1024 - 1); - const name = `${prefix}multipart/fallback`; - let progress = 0; - - const putStreamSpy = sinon.spy(store, 'putStream'); - const uploadPartSpy = sinon.spy(store, '_uploadPart'); - - const result = await store.multipartUpload(name, fileName, { - progress() { - progress++; - }, - }); - assert.equal(result.res.status, 200); - assert.equal(putStreamSpy.callCount, 1); - assert.equal(uploadPartSpy.callCount, 0); - assert.equal(progress, 1); - - assert.equal(typeof result.bucket, 'string'); - 
assert.equal(typeof result.etag, 'string'); - - store.putStream.restore(); - store._uploadPart.restore(); - }); - - it('should use default partSize when not specified', () => { - const partSize = store._getPartSize(1024 * 1024, null); - assert.equal(partSize, 1 * 1024 * 1024); - }); - - it('should use user specified partSize', () => { - const partSize = store._getPartSize(1024 * 1024, 200 * 1024); - assert.equal(partSize, 200 * 1024); - }); - - it('should not exceeds max part number', () => { - const fileSize = 10 * 1024 * 1024 * 1024; - const maxNumParts = 10 * 1000; - - const partSize = store._getPartSize(fileSize, 100 * 1024); - assert.equal(partSize, Math.ceil(fileSize / maxNumParts)); - }); - - it('should upload file using multipart upload', async () => { - // create a file with 1M random data - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); - - const name = `${prefix}multipart/upload-file`; - let progress = 0; - const result = await store.multipartUpload(name, fileName, { - partSize: 100 * 1024, - progress() { - progress++; - }, - }); - assert.equal(result.res.status, 200); - assert.equal(progress, 13); - - const object = await store.get(name); - assert.equal(object.res.status, 200); - const fileBuf = fs.readFileSync(fileName); - assert.equal(object.content.length, fileBuf.length); - // avoid comparing buffers directly for it may hang when generating diffs - assert.deepEqual(md5(object.content), md5(fileBuf)); - }); - - it('should upload file using multipart upload with exception', async () => { - // create a file with 1M random data - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); - - const name = `${prefix}multipart/upload-file-exception`; - const clientTmp = oss(config); - clientTmp.useBucket(bucket); - - const stubUploadPart = sinon.stub(clientTmp, '_uploadPart'); - stubUploadPart.throws('TestUploadPartException'); - - let errorMsg; - let errPartNum; - try { - await 
clientTmp.multipartUpload(name, fileName); - } catch (err) { - errorMsg = err.message; - errPartNum = err.partNum; - } - assert.equal(errorMsg, 'Failed to upload some parts with error: TestUploadPartException part_num: 1'); - assert.equal(errPartNum, 1); - clientTmp._uploadPart.restore(); - }); - - it('should upload Node.js Buffer using multipart upload', async () => { - // create a buffer with 1M random data - const fileName = await utils.createTempFile('multipart-upload-buffer', 1024 * 1024); - const fileBuf = fs.readFileSync(fileName); - - const name = `${prefix}multipart/upload-buffer`; - const result = await store.multipartUpload(name, fileBuf, { - partSize: 100 * 1024, - }); - - assert.equal(result.res.status, 200); - - const object = await store.get(name); - assert.equal(object.res.status, 200); - - assert.equal(object.content.length, fileBuf.length); - // avoid comparing buffers directly for it may hang when generating diffs - assert.deepEqual(md5(object.content), md5(fileBuf)); - }); - - it('should resume Node.js Buffer upload using checkpoint', async () => { - const uploadPart = store._uploadPart; - mm(store, '_uploadPart', function* (name, uploadId, partNo, data) { - if (partNo === 5) { - throw new Error('mock upload part fail.'); - } else { - return uploadPart.call(this, name, uploadId, partNo, data); - } - }); - - // create a file with 1M random data - const fileName = await utils.createTempFile('multipart-upload-buffer', 1024 * 1024); - const fileBuf = fs.readFileSync(fileName); - - const name = `${prefix}multipart/upload-buffer`; - let lastCpt = {}; - let progress = 0; - try { - await store.multipartUpload(name, fileBuf, { - partSize: 100 * 1024, - progress(percent, cpt) { - progress++; - lastCpt = cpt; - }, - }); - // should not succeed - assert(false); - } catch (err) { - // pass - } - - mm.restore(); - const result = await store.multipartUpload(name, fileBuf, { - checkpoint: lastCpt, - progress() { - progress++; - }, - }); - 
assert.equal(result.res.status, 200); - assert.equal(progress, 13); - - const object = await store.get(name); - assert.equal(object.res.status, 200); - assert.equal(object.content.length, fileBuf.length); - // avoid comparing buffers directly for it may hang when generating diffs - assert.deepEqual(md5(object.content), md5(fileBuf)); - }); - - it('should resume upload using checkpoint', async () => { - const uploadPart = store._uploadPart; - mm(store, '_uploadPart', function* (name, uploadId, partNo, data) { - if (partNo === 5) { - throw new Error('mock upload part fail.'); - } else { - return uploadPart.call(this, name, uploadId, partNo, data); - } - }); - - // create a file with 1M random data - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); - - const name = `${prefix}multipart/upload-file`; - const cptFile = '/tmp/.oss/cpt.json'; - let progress = 0; - try { - await store.multipartUpload(name, fileName, { - partSize: 100 * 1024, - progress(percent, cpt) { - progress++; - fs.writeFileSync(cptFile, JSON.stringify(cpt)); - }, - }); - // should not succeed - assert(false); - } catch (err) { - // pass - } - - mm.restore(); - const result = await store.multipartUpload(name, fileName, { - checkpoint: JSON.parse(fs.readFileSync(cptFile)), - progress() { - progress++; - }, - }); - assert.equal(result.res.status, 200); - assert.equal(progress, 13); - - const object = await store.get(name); - assert.equal(object.res.status, 200); - const fileBuf = fs.readFileSync(fileName); - assert.equal(object.content.length, fileBuf.length); - // avoid comparing buffers directly for it may hang when generating diffs - assert.deepEqual(md5(object.content), md5(fileBuf)); - }); - - it('should return requestId in init, upload part, complete', async () => { - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); // 1m - const name = `${prefix}multipart/upload-file`; - - const result = await store.multipartUpload(name, fileName, 
{ - progress(p, checkpoint, res) { - assert.equal(true, res && Object.keys(res).length !== 0); - }, - }); - assert.equal(true, result.res && Object.keys(result.res).length !== 0); - assert.equal(result.res.status, 200); - }); - - it('should upload with uploadPart', async () => { - const fileName = await utils.createTempFile('upload-with-upload-part', 10 * 100 * 1024); - - const name = `${prefix}multipart/upload-with-upload-part`; - - const init = await store.initMultipartUpload(name); - const { uploadId } = init; - const partSize = 100 * 1024; - const parts = await Promise.all( - Array(10) - .fill(1) - .map((v, i) => - store.uploadPart( - name, - uploadId, - i + 1, - fileName, - i * partSize, - Math.min((i + 1) * partSize, 10 * 100 * 1024) - ) - ) - ); - const dones = parts.map((_, i) => ({ - number: i + 1, - etag: _.etag, - })); - - const result = await store.completeMultipartUpload(name, uploadId, dones); - assert.equal(result.res.status, 200); - assert(result.data.Location.startsWith('https://')); - assert.equal(typeof result.data.Bucket, 'string'); - assert.equal(result.data.Key, name); - assert.equal(typeof result.data.ETag, 'string'); - }); - - it('should upload partSize be int number and greater then minPartSize', async () => { - // create a file with 1M random data - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); - - const name = `${prefix}multipart/upload-file`; - let progress = 0; - try { - const result = await store.multipartUpload(name, fileName, { - partSize: 14.56, - progress() { - progress++; - }, - }); - } catch (e) { - assert.equal('partSize must be int number', e.message); - } - - try { - await store.multipartUpload(name, fileName, { - partSize: 1, - progress() { - progress++; - }, - }); - } catch (e) { - assert.ok(e.message.startsWith('partSize must not be smaller')); - } - }); - - it('should skip doneParts when re-upload mutilpart files', async () => { - const PART_SIZE = 1024 * 100; - const FILE_SIZE = 1024 * 
500; - const SUSPENSION_LIMIT = 3; - const object = `multipart-${Date.now()}`; - const fileName = await utils.createTempFile(object, FILE_SIZE); - const uploadPart = store._uploadPart; - let checkpoint; - mm(store, '_uploadPart', function(name, uploadId, partNo, data) { - if (partNo === SUSPENSION_LIMIT) { - throw new Error('mock upload part fail.'); - } else { - return uploadPart.call(this, name, uploadId, partNo, data); - } - }); - try { - await store.multipartUpload(object, fileName, { - parallel: 1, - partSize: PART_SIZE, - progress: (percentage, c) => { - checkpoint = c; - }, - }); - } catch (e) { - assert.strictEqual(checkpoint.doneParts.length, SUSPENSION_LIMIT - 1); - } - mm.restore(); - const uploadPartSpy = sinon.spy(store, '_uploadPart'); - await store.multipartUpload(object, fileName, { - parallel: 1, - partSize: PART_SIZE, - checkpoint, - }); - assert.strictEqual(uploadPartSpy.callCount, FILE_SIZE / PART_SIZE - SUSPENSION_LIMIT + 1); - store._uploadPart.restore(); - }); - }); - - describe('requestError()', () => { - it('should request timeout exception', async () => { - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); // 1m - const name = `${prefix}multipart/upload-file`; - - const stubNetError = sinon.stub(store.urllib, 'request'); - const netErr = new Error('TestTimeoutErrorException'); - netErr.status = -2; - netErr.code = 'ConnectionTimeoutError'; - netErr.name = 'ConnectionTimeoutError'; - stubNetError.throws(netErr); - let timeoutErr; - try { - await store.multipartUpload(name, fileName); - } catch (err) { - timeoutErr = err; - } - - assert.equal(true, timeoutErr && Object.keys(timeoutErr).length !== 0); - assert.equal(timeoutErr.status, -2); - store.urllib.request.restore(); - }); - - it('should request net exception', async () => { - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); // 1m - const name = `${prefix}multipart/upload-file`; - - const stubNetError = 
sinon.stub(store.urllib, 'request'); - const netErr = new Error('TestNetErrorException'); - netErr.status = -1; - netErr.code = 'RequestError'; - netErr.name = 'RequestError'; - stubNetError.throws(netErr); - - let netErrs; - try { - await store.multipartUpload(name, fileName); - } catch (err) { - netErrs = err; - } - - assert.equal(true, netErr && Object.keys(netErrs).length !== 0); - assert.equal(netErrs.status, -1); - store.urllib.request.restore(); - }); - - it('should request throw ResponseTimeoutError', async () => { - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); // 1m - const name = `${prefix}multipart/upload-file`; - - const stubNetError = sinon.stub(store.urllib, 'request'); - const netErr = new Error('ResponseTimeoutError'); - netErr.status = -1; - netErr.code = 'ResponseTimeoutError'; - netErr.name = 'ResponseTimeoutError'; - stubNetError.throws(netErr); - - let netErrs; - try { - await store.multipartUpload(name, fileName); - } catch (err) { - netErrs = err; - } - assert.strictEqual(netErrs.name, 'ResponseTimeoutError'); - store.urllib.request.restore(); - }); - - it('should request throw abort event', async () => { - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); // 1m - const name = `${prefix}multipart/upload-file`; - const stubNetError = sinon.stub(store, '_uploadPart'); - const netErr = new Error('Not Found'); - netErr.status = 404; - netErr.code = 'Not Found'; - netErr.name = 'Not Found'; - stubNetError.throws(netErr); - let netErrs; - try { - await store.multipartUpload(name, fileName); - } catch (err) { - netErrs = err; - } - assert.strictEqual(netErrs.status, 0); - assert.strictEqual(netErrs.name, 'abort'); - store._uploadPart.restore(); - }); - }); - - describe('multipartCopy()', () => { - let fileName; - let name; - before(async () => { - fileName = await utils.createTempFile('multipart-upload-file-copy', 2 * 1024 * 1024); - name = 
`${prefix}multipart/upload-file-with-copy`; - await store.multipartUpload(name, fileName); - }); - - it('should multipart copy copy size err', async () => { - const file = await utils.createTempFile('multipart-upload-file', 50 * 1024); - const objectKey = `${prefix}multipart/upload-file-with-copy-small`; - await store.multipartUpload(objectKey, file); - const client = store; - const copyName = `${prefix}multipart/upload-file-with-copy-small-new`; - let copyErr = null; - try { - await client.multipartUploadCopy(copyName, { - sourceKey: objectKey, - sourceBucketName: bucket, - }); - } catch (err) { - copyErr = err; - } - - assert.equal(copyErr.message, 'copySize must not be smaller than 102400'); - }); - - it('should multipart copy part size err', async () => { - const client = store; - const copyName = `${prefix}multipart/upload-file-with-copy-new`; - let partSizeErr = null; - try { - await client.multipartUploadCopy( - copyName, - { - sourceKey: name, - sourceBucketName: bucket, - }, - { - partSize: 50 * 1024, - } - ); - } catch (err) { - partSizeErr = err; - } - - assert.equal(partSizeErr.message, 'partSize must not be smaller than 102400'); - }); - - it('should copy with upload part copy', async () => { - const client = store; - - // create a file with 1M random data - const fileNamez = await utils.createTempFile('multipart-upload-file-temp-copy', 10 * 100 * 1024); - - const key = `${prefix}multipart/upload-file-temp-copy`; - await client.multipartUpload(key, fileNamez); - - const copyName = `${prefix}multipart/upload-file-with-copy-new`; - const sourceData = { - sourceKey: name, - sourceBucketName: bucket, - }; - const objectMeta = await client._getObjectMeta(sourceData.sourceBucketName, sourceData.sourceKey, {}); - const fileSize = objectMeta.res.headers['content-length']; - - const result = await client.initMultipartUpload(copyName); - - const partSize = 100 * 1024; // 100kb - const dones = []; - const uploadFn = async i => { - const start = partSize * (i - 
1); - const end = Math.min(start + partSize, fileSize); - const range = `${start}-${end - 1}`; - const part = await store.uploadPartCopy(copyName, result.uploadId, i, range, sourceData, {}); - dones.push({ - number: i, - etag: part.res.headers.etag, - }); - }; - - await Promise.all( - Array(10) - .fill(1) - .map((v, i) => uploadFn(i + 1)) - ); - - const complete = await client.completeMultipartUpload(copyName, result.uploadId, dones); - - assert.equal(complete.res.status, 200); - }); - - it('should copy with multipart upload copy', async () => { - const client = store; - const copyName = `${prefix}multipart/upload-file-with-copy-new`; - const result = await client.multipartUploadCopy( - copyName, - { - sourceKey: name, - sourceBucketName: bucket, - }, - { - partSize: 256 * 1024, - } - ); - - assert.equal(result.res.status, 200); - }); - - it('should multipart upload copy with parallel = 1', async () => { - const client = store; - const copyName = `${prefix}multipart/upload-file-with-copy-parallel-1`; - const result = await client.multipartUploadCopy( - copyName, - { - sourceKey: name, - sourceBucketName: bucket, - }, - { - partSize: 256 * 1024, - parallel: 1, - } - ); - - assert.equal(result.res.status, 200); - }); - - it('should multipart copy with cancel and resume', async () => { - const client = store; - const copyName = `${prefix}multipart/upload-file-with-copy-cancel`; - let tempCheckpoint = null; - try { - await client.multipartUploadCopy( - copyName, - { - sourceKey: name, - sourceBucketName: bucket, - }, - { - partSize: 100 * 1024, - progress(p, checkpoint) { - tempCheckpoint = checkpoint; - if (p > 0.5) { - client.cancel(); - } - }, - } - ); - } catch (err) { - assert.equal(client.isCancel(), true); - } - - const result = await client.multipartUploadCopy( - copyName, - { - sourceKey: name, - sourceBucketName: bucket, - }, - { - partSize: 100 * 1024, - checkpoint: tempCheckpoint, - progress(p) { - assert.equal(p > 0.5, true); - }, - } - ); - - 
assert.equal(result.res.status, 200); - }); - - it('should multipart copy with exception', async () => { - const copyName = `${prefix}multipart/upload-file-with-copy-exception`; - const clientTmp = oss(config); - clientTmp.useBucket(bucket); - /* eslint no-unused-vars: [0] */ - const stubUploadPart = sinon.stub( - clientTmp, - 'uploadPartCopy', - async (objectKey, uploadId, partNo, range, sourceData, options) => { - if (partNo === 1) { - throw new Error('TestErrorException'); - } - } - ); - - let errorMsg; - let errPartNum; - try { - await clientTmp.multipartUploadCopy(copyName, { - sourceKey: name, - sourceBucketName: bucket, - }); - } catch (err) { - errorMsg = err.message; - errPartNum = err.partNum; - } - assert.equal(errorMsg, 'Failed to copy some parts with error: Error: TestErrorException part_num: 1'); - assert.equal(errPartNum, 1); - stubUploadPart.restore(); - }); - - it('should upload copy with list part', async () => { - const tempFileName = await utils.createTempFile('multipart-upload-list-part', 2 * 1024 * 1024); - const tempName = `${prefix}multipart/upload-list-part`; - await store.multipartUpload(tempName, tempFileName); - const client = store; - const copyName = `${prefix}multipart/upload-list-part-copy`; - let uploadIdz = null; - try { - await client.multipartUploadCopy( - copyName, - { - sourceKey: name, - sourceBucketName: bucket, - }, - { - parallel: 1, - partSize: 100 * 1024, - progress(p, checkpoint) { - if (p === 0) { - uploadIdz = checkpoint.uploadId; - } - if (p > 0.5) { - client.cancel(); - } - }, - } - ); - } catch (err) { - /* eslint no-empty: [0] */ - } - - const result = await store.listParts( - copyName, - uploadIdz, - { - 'max-parts': 1000, - }, - {} - ); - - assert.equal(result.res.status, 200); - }); - }); - - describe('multipartUploadStreams', () => { - afterEach(mm.restore); - it('multipartUploadStreams.length', async () => { - const uploadPart = store._uploadPart; - let i = 0; - const LIMIT = 1; - mm(store, '_uploadPart', 
function(name, uploadId, partNo, data) { - if (i === LIMIT) { - throw new Error('mock upload part fail.'); - } else { - i++; - return uploadPart.call(this, name, uploadId, partNo, data); - } - }); - - const fileName = await utils.createTempFile(`multipart-upload-file-${Date.now()}`, 1024 * 1024); - const name = `${prefix}multipart/upload-file-${Date.now()}`; - const name1 = `${prefix}multipart/upload-file-1-${Date.now()}`; - try { - await Promise.all([ store.multipartUpload(name, fileName), store.multipartUpload(name1, fileName) ]); - } catch (e) {} - mm.restore(); - await Promise.all([ store.multipartUpload(name, fileName), store.multipartUpload(name1, fileName) ]); - assert.strictEqual(store.multipartUploadStreams.length, 0); - }); - - it('destroy the stream when multipartUploaded and the cancel method is called', async () => { - const fileName = await utils.createTempFile(`multipart-upload-file-${Date.now()}`, 1024 * 1024); - let stream; - mm(store, '_uploadPart', (_name, _uploadId, _partNo, data) => { - stream = data.stream; - throw new Error('mock upload part fail.'); - }); - - const name = `${prefix}multipart/upload-file-${Date.now()}`; - try { - await store.multipartUpload(name, fileName); - } catch (e) { - store.cancel(); - } - mm.restore(); - assert.strictEqual(stream.destroyed, true); - }); - }); -}); diff --git a/test/multiversion.test.js b/test/multiversion.test.js deleted file mode 100644 index 55cb4ce00..000000000 --- a/test/multiversion.test.js +++ /dev/null @@ -1,776 +0,0 @@ -const assert = require('assert'); -const utils = require('./utils'); -const oss = require('..'); -const config = require('./config').oss; -const fs = require('fs'); -const ms = require('humanize-ms'); -const { metaSyncTime } = require('./config'); - -describe.skip('test/multiversion.test.js', () => { - const { prefix } = utils; - const enabled = 'Enabled'; - const suspended = 'Suspended'; - const bucket = config.bucket; - let store; - before(async () => { - store = oss(config); 
- store.useBucket(bucket); - // 用于产生versionId为null的版本 - await store.put('test-version-null', Buffer.from('test-version-null')); - }); - - describe('putBucketVersioning() getBucketVersioning()', () => { - it('should set bucket version', async () => { - try { - const result = await store.getBucketVersioning(bucket); - assert.strictEqual(result.versionStatus, undefined); - - const put1 = await store.putBucketVersioning(bucket, enabled); - assert.strictEqual(put1.status, 200); - const result1 = await store.getBucketVersioning(bucket); - assert.strictEqual(result1.versionStatus, enabled); - - const put2 = await store.putBucketVersioning(bucket, suspended); - assert.strictEqual(put2.status, 200); - const result2 = await store.getBucketVersioning(bucket); - assert.strictEqual(result2.versionStatus, suspended); - } catch (err) { - assert(false, err.message); - } - }); - }); - - describe('getBucketVersions()', () => { - before(async () => { - await store.putBucketVersioning(bucket, enabled); - const name = `${prefix}-getBucketVersions-file.js`; - const name1 = `${prefix}-getBucketVersions-file1.js`; - await store.put(name, __filename); - await store.delete(name); - await store.put(name, __filename); - await store.put(name1, __filename); - await store.delete(name1); - await store.put(name1, __filename); - }); - - it('should getBucketVersions', async () => { - try { - const result = await store.getBucketVersions(); - assert.strictEqual(result.res.status, 200); - assert.strictEqual(result.deleteMarker && result.deleteMarker.length, 2); - assert(result.objects && result.objects.length > 4); - } catch (err) { - assert(false, err.message); - } - }); - - it('should getBucketVersions with maxKeys', async () => { - try { - let result = await store.getBucketVersions({ - maxKeys: 3, - }); - assert(result.objects.length + result.deleteMarker.length === 3); - result = await store.getBucketVersions({ - maxKeys: 4, - }); - assert(result.objects.length + result.deleteMarker.length === 4); 
- } catch (err) { - assert(false, err.message); - } - }); - it('should getBucketVersions with delimiter', async () => { - const names = [ 'getBucketVersions/delimiter1.js', 'getBucketVersions/delimiter2.js', 'getBucketVersions/delimiter3.js', 'others.js' ]; - await Promise.all(names.map(_name => store.put(_name, __filename))); - try { - const result = await store.getBucketVersions({ - delimiter: '/', - }); - assert(result.prefixes && result.prefixes.includes('getBucketVersions/')); - } catch (err) { - assert(false, err.message); - } - }); - }); - - describe('putBucketLifecycle() getBucketLifecycle()', async () => { - it('should putBucketLifecycle with NoncurrentVersionExpiration', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [{ - id: 'expiration1', - prefix: 'logs/', - status: 'Enabled', - expiration: { - days: 1, - }, - noncurrentVersionExpiration: { - noncurrentDays: 1, - }, - }], { - timeout: 120000, - }); - await utils.sleep(ms(metaSyncTime)); - assert.strictEqual(putresult1.res.status, 200); - const { rules } = await store.getBucketLifecycle(bucket); - assert.strictEqual(rules[0].noncurrentVersionExpiration.noncurrentDays, '1'); - }); - it('should putBucketLifecycle with expiredObjectDeleteMarker', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [{ - id: 'expiration1', - prefix: 'logs/', - status: 'Enabled', - expiration: { - expiredObjectDeleteMarker: 'true', - }, - NoncurrentVersionExpiration: { - noncurrentDays: 1, - }, - }]); - assert.equal(putresult1.res.status, 200); - const { rules } = await store.getBucketLifecycle(bucket); - assert.strictEqual(rules[0].expiration.expiredObjectDeleteMarker, 'true'); - }); - - it('should putBucketLifecycle with noncurrentVersionTransition', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [ - { - id: 'expiration1', - prefix: 'logs/', - status: 'Enabled', - noncurrentVersionTransition: { - noncurrentDays: '10', - storageClass: 'IA', - }, - }, 
- ]); - assert.equal(putresult1.res.status, 200); - const { rules } = await store.getBucketLifecycle(bucket); - const [ - { - noncurrentVersionTransition: { noncurrentDays, storageClass }, - }, - ] = rules; - assert(noncurrentDays === '10' && storageClass === 'IA'); - }); - }); - - describe('copy()', () => { - let versionId; - const name = `${prefix}-multiversion-copy-file.js`; - before(async () => { - await store.putBucketVersioning(bucket, enabled); - const result = await store.put(name, __filename); - await utils.sleep(ms(metaSyncTime)); - await store.delete(name); - versionId = result.res.headers['x-oss-version-id']; - }); - - // 指定version id进行拷贝,拷贝指定版本 - it('should copy', async () => { - const target = `${name.replace('file.js', 'file-target.js')}`; - try { - const result = await store.copy(target, name, { - versionId, - }); - assert.strictEqual(result.res.status, 200); - } catch (error) { - assert(false); - } - }); - - // 不指定version id进行拷贝,拷贝最新版本 - it('should copy latest object when no versionId', async () => { - const target = `${name.replace('file.js', 'file-target-latest.js')}`; - const content = 'latest file'; - await store.put(name, Buffer.from(content)); - try { - const result = await store.copy(target, name); - assert.strictEqual(result.res.status, 200); - const targetRes = await store.get(target); - assert.strictEqual(targetRes.content.toString(), content); - } catch (error) { - assert(false); - } - }); - - // 暂停多版本, 进行copy, copy后object的versionId为null - it('should copy latest object with versionId `null` when the bucket is suspended', async () => { - const target = `${name.replace('file.js', 'file-target-suspended.js')}`; - const suspendedRes = await store.putBucketVersioning(bucket, suspended); - await utils.sleep(ms(metaSyncTime)); - assert.strictEqual(suspendedRes.res.status, 200); - try { - const result = await store.copy(target, name, { - versionId, - }); - assert.strictEqual(result.res.status, 200); - 
assert.strictEqual(result.res.headers['x-oss-version-id'], 'null'); - } catch (error) { - assert(false); - } - }); - }); - - describe('head()', () => { - const name = `${prefix}-multiversion-copy-file.js`; - let versionId; - before(async () => { - await store.putBucketVersioning(bucket, enabled); - const result = await store.put(name, __filename); - store.delete(name); - versionId = result.res.headers['x-oss-version-id']; - }); - - it('should head info', async () => { - try { - const result = await store.head(name, { versionId }); - assert.strictEqual(result.res.headers['x-oss-version-id'], versionId); - assert.strictEqual(result.res.status, 200); - } catch (error) { - assert(false); - } - }); - }); - - describe('multipartUploadCopy()', () => { - before(async () => { - await store.putBucketVersioning(bucket, enabled); - }); - - it('should multipartUploadCopy', async () => { - const file = await utils.createTempFile('multipart-upload-file', 102410); - const objectKey = `${prefix}multipart-copy-source.js`; - const { res: sourceRes } = await store.multipartUpload(objectKey, file); - const versionId = sourceRes.headers['x-oss-version-id']; - store.delete(objectKey); - const copyName = `${prefix}multipart-copy-target.js`; - try { - const result = await store.multipartUploadCopy(copyName, { - sourceKey: objectKey, - sourceBucketName: bucket, - }, { - versionId, - }); - assert.strictEqual(result.res.status, 200); - } catch (error) { - assert(false); - } - }); - }); - - describe('deleteMulti()', () => { - let name = `${prefix}-multiversion-deleteMulti-file.js`; - const arr = []; - before(async () => { - await store.putBucketVersioning(bucket, enabled); - let result; - const _createHistoryObject = async i => { - name = name.replace('file', `file${i}`); - result = await store.put(name, __filename); - await store.delete(name); - arr.push({ - key: name, - versionId: result.res.headers['x-oss-version-id'], - }); - }; - await Promise.all(Array(3).fill(1).map((_, i) => 
_createHistoryObject(i))); - }); - - it('should deleteMulti', async () => { - try { - const result = await store.deleteMulti(arr); - assert.strictEqual(result.res.status, 200); - } catch (error) { - assert(false); - } - }); - }); - - describe('restore()', () => { - const name = `${prefix}-multiversion-restore-file.js`; - let putResult; - let versionId; - - before(async () => { - await store.putBucketVersioning(bucket, enabled); - const headers = { - 'x-oss-storage-class': 'Archive', - }; - putResult = await store.put(name, __filename, { headers }); - versionId = putResult.res.headers['x-oss-version-id']; - }); - - it('should restore', async () => { - const head = await store.head(name); - assert.strictEqual(head.res.headers['x-oss-storage-class'], 'Archive'); - // 删除版本使成为历史版本 - await store.delete(name); - try { - const result = await store.restore(name, { - versionId, - }); - assert.strictEqual(result.res.status, 202); - - await store.restore(name, { - versionId, - }); - } catch (error) { - if (error.status === 409) { - assert(true); - } else { - assert(false); - } - } - }); - }); - - describe('putACL()', () => { - const name = `${prefix}-multiversion-putACL-file.js`; - let putResult; - let versionId; - - before(async () => { - await store.putBucketVersioning(bucket, enabled); - putResult = await store.put(name, __filename); - await store.delete(name); - versionId = putResult.res.headers['x-oss-version-id']; - }); - - it('should putACL', async () => { - try { - const result = await store.putACL(name, 'public-read', { - versionId, - }); - assert.strictEqual(result.res.status, 200); - } catch (error) { - assert(false); - } - }); - }); - - describe('getACL()', () => { - const name = `${prefix}-multiversion-getACL-file.js`; - let putResult; - let versionId; - - before(async () => { - await store.putBucketVersioning(bucket, enabled); - putResult = await store.put(name, __filename); - await store.delete(name); - versionId = putResult.res.headers['x-oss-version-id']; - 
}); - - it('should getACL', async () => { - try { - const result = await store.getACL(name, { - versionId, - }); - assert.strictEqual(result.res.status, 200); - } catch (error) { - assert(false); - } - }); - }); - - describe('getSymlink()', () => { - const name = `${prefix}-multiversion-symlink-file.js`; - const targetName = '/oss/target-测试.js'; - let versionId; - let result; - - before(async () => { - await store.putBucketVersioning(bucket, enabled); - }); - - it('should getSymlink', async () => { - try { - await store.put(targetName.replace('测试', '测试1'), __filename); - result = await store.putSymlink(name, targetName.replace('测试', '测试1')); - versionId = result.res.headers['x-oss-version-id']; - await store.put(targetName.replace('测试', '测试2'), __filename); - await store.putSymlink(name, targetName.replace('测试', '测试2')); - - result = await store.getSymlink(name, { - versionId, - }); - assert.strictEqual(result.res.status, 200); - } catch (error) { - assert(false); - } - }); - }); - - describe('get()', () => { - const name = `${prefix}oss-client/oss/get-multiversion.js`; - let putResult; - let versionId; - let delVersionId; - - before(async () => { - await store.putBucketVersioning(bucket, enabled); - putResult = await store.put(name, __filename); - const delres = await store.delete(name); // 删除当前版本,当前版本直接变为历史版本并且生成删除标记 - delVersionId = delres.res.headers['x-oss-version-id']; - versionId = putResult.res.headers['x-oss-version-id']; - }); - - // 指定版本, 且该版本不为删除标记 - it('should get with versionId', async () => { - const res = await store.get(name, { - versionId, - }); - assert.strictEqual(res.res.status, 200); - }); - - // // 指定版本, 且该版本为删除标记 - it('should throw error when version is deleter marker', async () => { - try { - await store.get(name, { - versionId: delVersionId, - }); - assert(false); - } catch (error) { - assert.strictEqual(error.status, 405); - } - }); - - // 不指定版本,且当前版本为删除标记 - it('should throw error, when no versionId and current version is deleter marker', 
async () => { - try { - await store.get(name); - assert(false); - } catch (error) { - assert.strictEqual(error.status, 404); - } - }); - - // 不指定版本,且当前版本不为删除标记 - it('should get latest object, when no versionId and current version is object', async () => { - const content = 'current version'; - await store.put(name, Buffer.from(content)); - const result = await store.get(name); - assert.strictEqual(result.content.toString(), content); - }); - }); - - describe('delete()', () => { - const name = `${prefix}oss-client/oss/delete-multiversion.js`; - let versionId; - - before(async () => { - await store.putBucketVersioning(bucket, enabled); - await store.put(name, __filename); - }); - - // 不指定version id,删除当前版本,生成DELETE标记 - it('should delete object without versionId', async () => { - await utils.sleep(ms(metaSyncTime)); - const res = await store.delete(name); - assert.strictEqual(res.res.headers['x-oss-delete-marker'], 'true'); - assert(res.res.headers['x-oss-version-id']); - }); - - // 指定version id,删除指定版本 - it('should delete object with versionId', async () => { - const result = await store.put(name, __filename); - versionId = result.res.headers['x-oss-version-id']; - const res = await store.delete(name, { - versionId, - }); - assert.strictEqual(res.res.headers['x-oss-version-id'], versionId); - }); - - // 指定versionId,删除DELETE标记恢复上一个版本 - it('should delete marker with versionId and restore lastest marker or object', async () => { - const result = await store.put(name, __filename); - // 版本versionId - versionId = result.res.headers['x-oss-version-id']; - // 删除版本 - const res = await store.delete(name); - // 标记versionId - const markerVersionId = res.res.headers['x-oss-version-id']; - // 删除标记 - await store.delete(name, { - versionId: markerVersionId, - }); - const headInfo = await store.head(name); - assert.strictEqual(headInfo.res.headers['x-oss-version-id'], versionId); - }); - - // 暂停多版本后,删除当前版本,当前版本不为null,为其生成一份历史版本,生成DELETE标记 - it('should delete, generate a historical 
version and generate a delete marker when suspended and current versionId not null', async () => { - await store.putBucketVersioning(bucket, enabled); - const currentName = `${name}suspended-delete`; - const result = await store.put(currentName, Buffer.from('suspended-delete')); - assert(result.res.headers['x-oss-version-id'] !== 'null'); - await store.putBucketVersioning(bucket, suspended); - // 删除当前版本 - const deleteRes = await store.delete(currentName); - assert.strictEqual(deleteRes.res.status, 204); - // 验证产生历史版本和删除标记 - const list = await store.getBucketVersions(); - assert(list.deleteMarker.find(v => v.name === currentName)); - assert(list.objects.find(v => v.name === currentName)); - }); - - // 暂停多版本后,删除当前版本,当前版本为null version,则直接删除,并生成DELETE标记 - it('should delete, generate a delete marker when suspended and current version is null', async () => { - await store.putBucketVersioning(bucket, enabled); - const currentName = 'version-null'; - await store.putBucketVersioning(bucket, suspended); - // 删除当前版本 - const deleteRes = await store.delete(currentName); // 相当于指定版本删除 - assert.strictEqual(deleteRes.res.status, 204); - // 验证未产生历史版本和产生删除标记 - const list = await store.getBucketVersions(); - assert(list.deleteMarker.find(v => v.name === currentName)); - assert(!list.objects.find(v => v.name === currentName)); - }); - - // 暂停多版本后,当前版本为DELETE标记,指定version删除该DELETE标记(包括null version),则恢复上一个版本 - it('should delete marker and restore lastest version when suspended ', async () => { - await store.putBucketVersioning(bucket, enabled); - try { - const currentName = 'delete-marker-test'; - const result = await store.put(currentName, Buffer.from(currentName)); - const currentVersionId = result.res.headers['x-oss-version-id']; - // 删除当前版本 产生标记 - const delRes = await store.delete(currentName); - const delVerionsId = delRes.res.headers['x-oss-version-id']; - await store.putBucketVersioning(bucket, suspended); - // 删除标记 - await store.delete(currentName, { - versionId: delVerionsId, - 
}); - // 验证是否恢复上一个版本 - const headInfo = await store.head(currentName); - assert.strictEqual(headInfo.res.headers['x-oss-version-id'], currentVersionId); - } catch (error) { - assert(false, error.message); - } - }); - }); - - describe('getBucketInfo()', () => { - it('should return bucket Versioning', async () => { - try { - await store.putBucketVersioning(bucket, enabled); - const result = await store.getBucketInfo(bucket); - assert.equal(result.res.status, 200); - assert.equal(result.bucket.Versioning, enabled); - } catch (error) { - assert(false, error.message); - } - }); - }); - - describe('getObjectTagging(), putObjectTagging(), deleteObjectTagging()', () => { - const name = `${prefix}-multiversion-tagging-file.js`; - let putResult; - let versionId; - let versionOpt; - - before(async () => { - await store.putBucketVersioning(bucket, enabled); - putResult = await store.put(name, __filename); - await store.delete(name); - versionId = putResult.res.headers['x-oss-version-id']; - versionOpt = { - versionId, - }; - }); - - it('should get the tags of object', async () => { - try { - const result = await store.getObjectTagging(name, versionOpt); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, {}); - } catch (error) { - assert(false, error); - } - }); - - it('should configures or updates the tags of object', async () => { - let result; - try { - const tag = { a: '1', b: '2' }; - result = await store.putObjectTagging(name, tag, versionOpt); - assert.strictEqual(result.status, 200); - - result = await store.getObjectTagging(name, versionOpt); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, tag); - } catch (error) { - assert(false, error); - } - }); - - it('should delete the tags of object', async () => { - let result; - try { - const tag = { a: '1', b: '2' }; - await store.putObjectTagging(name, tag, versionOpt); - - result = await store.deleteObjectTagging(name, versionOpt); - assert.strictEqual(result.status, 204); - - 
result = await store.getObjectTagging(name, versionOpt); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, {}); - } catch (error) { - assert(false, error); - } - }); - }); - - describe('getObjectMeta()', () => { - let name; - let resHeaders; - let fileSize; - let opt; - before(async () => { - await store.putBucketVersioning(bucket, enabled); - name = `${prefix}oss-client/oss/object-multiversion-meta.js`; - const object = await store.put(name, __filename); - fileSize = fs.statSync(__filename).size; - resHeaders = object.res.headers; - // 删除当前版本,创建历史版本 - await store.delete(name); - opt = { - versionId: object.res.headers['x-oss-version-id'], - }; - }); - - it('should return Etag and Content-Length', async () => { - try { - const info = await store.getObjectMeta(name, opt); - assert.equal(info.status, 200); - assert.equal(info.res.headers.etag, resHeaders.etag); - assert.equal(info.res.headers['content-length'], fileSize); - } catch (error) { - assert(false, error.message); - } - }); - }); - - describe('deleteMulti()', () => { - const names = []; - const versionIds = []; - beforeEach(async () => { - await store.putBucketVersioning(bucket, enabled); - let name = `${prefix}oss-client/oss/deleteMulti0.js`; - let result; - result = await store.put(name, __filename); - versionIds.push(result.res.headers['x-oss-version-id']); - names.push(name); - - name = `${prefix}oss-client/oss/deleteMulti1.js`; - result = await store.put(name, __filename); - versionIds.push(result.res.headers['x-oss-version-id']); - names.push(name); - - name = `${prefix}oss-client/oss/deleteMulti2.js`; - result = await store.put(name, __filename); - versionIds.push(result.res.headers['x-oss-version-id']); - names.push(name); - }); - - it('should delete', async () => { - try { - // 不指定版本 批量删除,产生历史版本和删除标记 - let result; - result = await store.deleteMulti(names); - const markerVersionId = result.deleted.map(v => v.DeleteMarkerVersionId); - assert.strictEqual(result.res.status, 200); 
- assert.strictEqual(result.deleted.map(v => v.Key).sort().toString(), names.sort().toString()); - assert.strictEqual(result.deleted.filter(v => v.DeleteMarker).length, result.deleted.length); - - // 指定版本 批量删除历史版本文件,永久删除 - const delNameObjArr = names.map((_, index) => ({ - key: _, - versionId: versionIds[index], - })); - result = await store.deleteMulti(delNameObjArr); - assert.strictEqual(result.res.status, 200); - assert.strictEqual(result.deleted.map(v => v.Key).sort().toString(), names.sort().toString()); - - // 指定版本 批量删除标记 - const delNameMarkerArr = names.map((_, index) => ({ - key: _, - versionId: markerVersionId[index], - })); - result = await store.deleteMulti(delNameMarkerArr); - assert.strictEqual(result.res.status, 200); - assert.strictEqual(result.deleted.map(v => v.Key).sort().toString(), names.sort().toString()); - assert.strictEqual(result.deleted.filter(v => v.DeleteMarker).length, result.deleted.length); - } catch (error) { - assert(false, error.message); - } - }); - }); - - describe('uploadPartCopy()', () => { - let fileName; - let sourceName; - let versionId; - before(async () => { - await store.putBucketVersioning(bucket, enabled); - fileName = await utils.createTempFile( - 'multipart-upload-file-copy', - 2 * 1024 * 1024 - ); - sourceName = `${prefix}multipart/upload-file-with-copy`; - const res = await store.multipartUpload(sourceName, fileName); - // versionId - versionId = res.res.headers['x-oss-version-id']; - // 删除当前版本 - await store.delete(sourceName); - }); - - it('should copy with upload part copy', async () => { - const copyName = `${prefix}multipart/upload-file-with-copy-new`; - const sourceData = { - sourceKey: sourceName, - sourceBucketName: bucket, - }; - const objectMeta = await store._getObjectMeta( - sourceData.sourceBucketName, - sourceData.sourceKey, - { - versionId, - } - ); - const fileSize = objectMeta.res.headers['content-length']; - - const result = await store.initMultipartUpload(copyName); - - const partSize = 100 * 1024; 
// 100kb - const dones = []; - - const uploadFn = async i => { - const start = partSize * (i - 1); - const end = Math.min(start + partSize, fileSize); - const range = `${start}-${end - 1}`; - const part = await store.uploadPartCopy( - copyName, - result.uploadId, - i, - range, - sourceData, - { versionId } - ); - dones.push({ - number: i, - etag: part.res.headers.etag, - }); - }; - await Promise.all(Array(10).fill(1).map((v, i) => uploadFn(i + 1))); - - const complete = await store.completeMultipartUpload( - copyName, - result.uploadId, - dones - ); - - assert.equal(complete.res.status, 200); - }); - }); -}); diff --git a/test/object.test.js b/test/object.test.js deleted file mode 100644 index 0c15bf54f..000000000 --- a/test/object.test.js +++ /dev/null @@ -1,2470 +0,0 @@ -const fs = require('fs'); -const { readFile, writeFile } = require('fs/promises'); -const path = require('path'); -const assert = require('assert'); -const os = require('os'); -const { Readable } = require('stream'); -const ms = require('humanize-ms'); -const urllib = require('urllib'); -const copy = require('copy-to'); -const mm = require('mm'); -const crypto = require('crypto'); -const urlutil = require('url'); -const { metaSyncTime, oss: config } = require('./config'); -const utils = require('./utils'); -const oss = require('..'); - -describe('test/object.test.js', () => { - const tmpdir = os.tmpdir(); - const { prefix } = utils; - const bucket = config.bucket; - let store; - // let archvieBucket; - before(async () => { - store = oss(config); - // just for archive bucket test - // archvieBucket = `oss-client-archvie-bucket-${prefix.replace(/[/.]/g, '-')}`; - // archvieBucket = archvieBucket.substring(0, archvieBucket.length - 1); - store.useBucket(bucket); - // await store.putBucket(archvieBucket, { StorageClass: 'Archive' }); - // store.useBucket(archvieBucket, bucketRegion); - }); - - afterEach(mm.restore); - - describe('putStream()', () => { - it('should add object with streaming way', 
async () => { - const name = `${prefix}oss-client/oss/putStream-localfile.js`; - const object = await store.putStream(name, fs.createReadStream(__filename)); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - assert.equal(object.res.size, 0); - assert.equal(object.name, name); - assert(object.url); - - // check content - const r = await store.get(name); - assert.equal(r.res.status, 200); - assert(r.res.timing.contentDownload > 0); - assert.equal(r.content.toString(), await readFile(__filename, 'utf8')); - }); - - it('should add image with file streaming way', async () => { - const name = `${prefix}oss-client/oss/nodejs-1024x768.png`; - const imagepath = path.join(__dirname, 'nodejs-1024x768.png'); - const object = await store.putStream(name, fs.createReadStream(imagepath), { - mime: 'image/png', - }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - assert.equal(object.res.size, 0); - assert.equal(object.name, name); - - // check content - const r = await store.get(name); - // console.log(r.res.headers); - // { - // server: 'AliyunOSS', - // date: 'Sat, 22 Oct 2022 13:25:55 GMT', - // 'content-type': 'image/png', - // 'content-length': '502182', - // connection: 'keep-alive', - // 'x-oss-request-id': '6353EF633DE20A809D8088EA', - // 'accept-ranges': 'bytes', - // etag: '"39D12ED73B63BAAC31F980F555AE4FDE"', - // 'last-modified': 'Sat, 22 Oct 2022 13:25:55 GMT', - // 'x-oss-object-type': 'Normal', - // 'x-oss-hash-crc64ecma': '8835162692478804631', - // 'x-oss-storage-class': 'Standard', - // 'content-md5': 'OdEu1ztjuqwx+YD1Va5P3g==', - // 'x-oss-server-time': '14' - // } - assert.equal(r.res.status, 200); - assert.equal(r.res.headers['content-type'], 'image/png'); - const buf = await readFile(imagepath); - assert.equal(r.res.headers['content-length'], `${buf.length}`); - assert.equal(r.content.length, buf.length); - 
assert.deepEqual(r.content, buf); - }); - - it('should put object with http streaming way', async () => { - const name = `${prefix}oss-client/oss/nodejs-1024x768.png`; - const nameCpy = `${prefix}oss-client/oss/nodejs-1024x768`; - const imagepath = path.join(__dirname, 'nodejs-1024x768.png'); - await store.putStream(name, fs.createReadStream(imagepath), { mime: 'image/png' }); - const signUrl = await store.signatureUrl(name, { expires: 3600 }); - const { res: httpStream } = await urllib.request(signUrl, { - dataType: 'stream', - }); - let result = await store.putStream(nameCpy, httpStream); - assert.equal(result.res.status, 200); - result = await store.get(nameCpy); - assert.equal(result.res.status, 200); - assert.equal(result.res.headers['content-type'], 'application/octet-stream'); - assert.equal(result.res.headers['content-length'], httpStream.headers['content-length']); - }); - - it('should add very big file: 4mb with streaming way', async () => { - const name = `${prefix}oss-client/oss/bigfile-4mb.bin`; - const bigfile = path.join(tmpdir, 'bigfile-4mb.bin'); - await writeFile(bigfile, Buffer.alloc(4 * 1024 * 1024).fill('a\n')); - const object = await store.putStream(name, fs.createReadStream(bigfile)); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - assert.equal(object.res.size, 0); - assert.equal(object.name, name); - - // check content - const r = await store.get(name); - assert.equal(r.res.status, 200); - assert.equal(r.res.headers['content-type'], 'application/octet-stream'); - assert.equal(r.res.size, 4 * 1024 * 1024); - const buf = await readFile(bigfile); - assert.equal(r.content.length, buf.length); - assert.deepEqual(r.content, buf); - }); - - it('should throw error with stream destroy', async () => { - const name = `${prefix}oss-client/oss/putStream-source-destroy.js`; - await assert.rejects(async () => { - const readerStream = 
fs.createReadStream(`${__filename}.notexists.js`); - await store.putStream(name, readerStream); - }, err => { - assert.strictEqual(err.status, -1); - return true; - }); - }); - }); - - describe('processObjectSave()', () => { - const name = 'sourceObject.png'; - const target = `processObject_target${Date.now()}.jpg`; - before(async () => { - const imagepath = path.join(__dirname, 'nodejs-1024x768.png'); - await store.putStream(name, fs.createReadStream(imagepath), { - mime: 'image/png', - }); - }); - - it('should process image', async () => { - const result = await store.processObjectSave( - name, - target, - 'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00,' - ); - assert.equal(result.res.status, 200); - assert.equal(result.status, 200); - }); - - // it('should process image with targetBucket', async () => { - // try { - // const result = await store.processObjectSave( - // name, - // target, - // 'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00,', - // archvieBucket - // ); - // assert.strictEqual(result.res.status, 200); - // } catch (error) { - // assert(false, error); - // } - // }); - - it('should throw error when sourceObjectName is invalid', async () => { - await assert.rejects(async () => { - await store.processObjectSave('', target, - 'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00,'); - }, err => { - assert.equal(err.message, 'sourceObject is required'); - return true; - }); - await assert.rejects(async () => { - await store.processObjectSave({}, target, - 'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00,'); - }, err => { - assert.equal(err.message, 'sourceObject must be String'); - return true; - }); - }); - - it('should throw error when targetObjectName is invalid', async () => { - await assert.rejects(async () => { - await store.processObjectSave(name, '', - 'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00,'); - }, err => { - assert.equal(err.message, 'targetObject is 
required'); - return true; - }); - - await assert.rejects(async () => { - await store.processObjectSave(name, {}, - 'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00,'); - }, err => { - assert.equal(err.message, 'targetObject must be String'); - return true; - }); - }); - - it('should throw error when process is invalid', async () => { - await assert.rejects(async () => { - await store.processObjectSave(name, target, ''); - }, err => { - assert.equal(err.message, 'process is required'); - return true; - }); - - await assert.rejects(async () => { - await store.processObjectSave(name, target, {}); - }, err => { - assert.equal(err.message, 'process must be String'); - return true; - }); - }); - }); - - describe('getObjectUrl()', () => { - it('should return object url', () => { - let name = 'test.js'; - let url = store.getObjectUrl(name); - assert.equal(url, store.options.endpoint.format() + name); - - name = '/foo/bar/a%2Faa/test&+-123~!.js'; - url = store.getObjectUrl(name, 'https://foo.com'); - assert.equal(url, 'https://foo.com/foo/bar/a%252Faa/test%26%2B-123~!.js'); - const url2 = store.getObjectUrl(name, 'https://foo.com/'); - assert.equal(url2, 'https://foo.com/foo/bar/a%252Faa/test%26%2B-123~!.js'); - }); - }); - - describe('generateObjectUrl()', () => { - it('should return object url', () => { - let name = 'test.js'; - let url = store.generateObjectUrl(name); - - let baseUrl = store.options.endpoint.format(); - const copyUrl = urlutil.parse(baseUrl); - copyUrl.hostname = `${bucket}.${copyUrl.hostname}`; - copyUrl.host = `${bucket}.${copyUrl.host}`; - baseUrl = copyUrl.format(); - assert.equal(url, `${baseUrl}${name}`); - - name = '/foo/bar/a%2Faa/test&+-123~!.js'; - url = store.generateObjectUrl(name, 'https://foo.com'); - assert.equal(url, 'https://foo.com/foo/bar/a%252Faa/test%26%2B-123~!.js'); - const url2 = store.generateObjectUrl(name, 'https://foo.com/'); - assert.equal(url2, 'https://foo.com/foo/bar/a%252Faa/test%26%2B-123~!.js'); - }); - 
}); - - describe('put()', () => { - it('should add object with local file path', async () => { - const name = `${prefix}oss-client/oss/put-localfile.js`; - const object = await store.put(name, __filename); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - assert.equal(object.res.size, 0); - assert.equal(object.name, name); - }); - - it('should with options.ctx', async () => { - const name = `${prefix}oss-client/oss/put-localfile-options-ctx.js`; - let ctx = { - httpclient: {}, - }; - await assert.rejects(async () => { - await store.put(name, __filename, { ctx }); - }, err => { - assert(err.message.includes('raw error: TypeError: urllib.request is not a function')); - return true; - }); - ctx = { - httpclient: urllib, - }; - let object = await store.put(name, __filename, { ctx }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - assert.equal(object.res.size, 0); - assert.equal(object.name, name); - - ctx = { - urllib, - }; - object = await store.put(name, __filename, { ctx }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - assert.equal(object.res.size, 0); - assert.equal(object.name, name); - }); - - it('should add object with content buffer', async () => { - const name = `${prefix}oss-client/oss/put-buffer`; - const object = await store.put(`/${name}`, Buffer.from('foo content')); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - assert.equal(object.name, name); - }); - - it('should add object with readstream', async () => { - const name = `${prefix}oss-client/oss/put-readstream`; - const object = await store.put(name, fs.createReadStream(__filename)); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - 
assert.equal(typeof object.res.headers.etag, 'string'); - assert.equal(object.name, name); - }); - - it('should add object with Readable', async () => { - const name = `${prefix}oss-client/oss/put-Readable`; - async function* generate() { - yield 'Hello, '; - yield '你好 OSS'; - } - // Using stream.Readable.from() method - const readable = Readable.from(generate()); - const object = await store.put(name, readable, { - headers: { - 'content-length': Buffer.byteLength('Hello, 你好 OSS', 'utf-8'), - }, - }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - assert.equal(typeof object.res.headers.etag, 'string'); - assert.equal(object.name, name); - const result = await store.get(name); - assert.equal(result.content.toString(), 'Hello, 你好 OSS'); - }); - - it('should add object with meta', async () => { - const name = `${prefix}oss-client/oss/put-meta.js`; - const object = await store.put(name, __filename, { - meta: { - uid: 1, - slus: 'test.html', - }, - }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - assert.equal(object.res.size, 0); - assert.equal(object.name, name); - - const info = await store.head(name); - assert.deepEqual(info.meta, { - uid: '1', - slus: 'test.html', - }); - assert.equal(info.status, 200); - }); - - it('should set Content-Disposition with ascii name', async () => { - const name = `${prefix}oss-client/oss/put-Content-Disposition.js`; - const object = await store.put(name, __filename, { - headers: { - 'Content-Disposition': 'ascii-name.js', - }, - }); - assert(object.name, name); - const info = await store.head(name); - assert.equal(info.res.headers['content-disposition'], 'ascii-name.js'); - }); - - it('should set Content-Disposition with no-ascii name', async () => { - const name = `${prefix}oss-client/oss/put-Content-Disposition.js`; - const object = await store.put(name, __filename, { - headers: { - 
'Content-Disposition': encodeURIComponent('non-ascii-名字.js'), - }, - }); - assert(object.name, name); - const info = await store.head(name); - assert.equal(info.res.headers['content-disposition'], 'non-ascii-%E5%90%8D%E5%AD%97.js'); - }); - - it('should set Expires', async () => { - const name = `${prefix}oss-client/oss/put-Expires.js`; - const object = await store.put(name, __filename, { - headers: { - Expires: 1000000, - }, - }); - assert(object.name, name); - const info = await store.head(name); - assert.equal(info.res.headers.expires, '1000000'); - }); - - it('should set custom Content-Type', async () => { - const name = `${prefix}oss-client/oss/put-Content-Type.js`; - const object = await store.put(name, __filename, { - headers: { - 'Content-Type': 'text/plain; charset=gbk', - }, - }); - assert(object.name, name); - const info = await store.head(name); - assert.equal(info.res.headers['content-type'], 'text/plain; charset=gbk'); - }); - - it('should set custom content-type lower case', async () => { - const name = `${prefix}oss-client/oss/put-Content-Type.js`; - const object = await store.put(name, __filename, { - headers: { - 'content-type': 'application/javascript; charset=utf8', - }, - }); - assert(object.name, name); - const info = await store.head(name); - assert.equal(info.res.headers['content-type'], 'application/javascript; charset=utf8'); - }); - - it('should set custom Content-MD5 and ignore case', async () => { - const name = `test-md5-${Date.now()}.js`; - const fileName = await utils.createTempFile(name, 1024 * 4); - const MD5Value = crypto.createHash('md5').update(fs.readFileSync(fileName)).digest('base64'); - await store.put(name, fileName, { - headers: { - 'Content-MD5': MD5Value, - }, - }); - await store.put(name, fileName, { - headers: { - 'content-Md5': MD5Value, - }, - }); - }); - - it('should return correct encode when name include + and space', async () => { - const name = 'ali-sdkhahhhh+oss+mm xxx.js'; - const object = await 
store.put(name, __filename, { - headers: { - 'Content-Type': 'text/plain; charset=gbk', - }, - }); - assert(object.name, name); - const info = await store.head(name); - const url = info.res.requestUrls[0]; - const { pathname } = urlutil.parse(url); - assert.equal(pathname, '/ali-sdkhahhhh%2Boss%2Bmm%20xxx.js'); - assert.equal(info.res.headers['content-type'], 'text/plain; charset=gbk'); - }); - - it('PUTs object with same name to a bucket', async () => { - const body = Buffer.from('san'); - const name = `${prefix}put/testsan`; - const resultPut = await store.put(name, body); - assert.equal(resultPut.res.status, 200); - await assert.rejects(async () => { - await store.put(name, body, { - headers: { 'x-oss-forbid-overwrite': 'true' }, - }); - }, err => { - assert.equal(err.name, 'FileAlreadyExistsError'); - assert.equal(err.message, 'The object you specified already exists and can not be overwritten.'); - return true; - }); - }); - - it('should throw error when path is not file ', async () => { - const file = __dirname; - const name = `${prefix}put/testpathnotfile`; - await assert.rejects(async () => { - await store.put(name, file); - }, err => { - assert.equal(`${__dirname} is not file`, err.message); - return true; - }); - }); - }); - - describe('test-content-type', () => { - it('should put object and content-type not null when upload file and object name has no MIME', async () => { - const name = `${prefix}oss-client/oss/test-content-type`; - const bigfile = path.join(tmpdir, 'test-content-type'); - await writeFile(bigfile, Buffer.alloc(4 * 1024).fill('a\n')); - const object = await store.put(name, bigfile); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - assert.equal(typeof object.res.rt, 'number'); - assert.equal(object.res.size, 0); - assert.equal(object.name, name); - - const r = await store.get(name); - assert.equal(r.res.status, 200); - assert.equal(r.res.headers['content-type'], 'application/octet-stream'); - }); - }); - - 
describe('mimetype', () => { - const createFile = async (name, size) => { - size = size || 200 * 1024; - await new Promise((resolve, reject) => { - const rs = fs.createReadStream('/dev/random', { - start: 0, - end: size - 1, - }); - const ws = fs.createWriteStream(name); - rs.pipe(ws); - ws.on('finish', (err, res) => { - if (err) { - reject(err); - } else { - resolve(res); - } - }); - }); - - return name; - }; - - it('should set mimetype by file ext', async () => { - const filepath = path.join(tmpdir, 'content-type-by-file.jpg'); - await createFile(filepath); - const name = `${prefix}oss-client/oss/content-type-by-file.png`; - await store.put(name, filepath); - - let result = await store.head(name); - assert.equal(result.res.headers['content-type'], 'image/jpeg'); - - await store.multipartUpload(name, filepath); - result = await store.head(name); - assert.equal(result.res.headers['content-type'], 'image/jpeg'); - }); - - it('should set mimetype by object key', async () => { - const filepath = path.join(tmpdir, 'content-type-by-file'); - await createFile(filepath); - const name = `${prefix}oss-client/oss/content-type-by-file.png`; - await store.put(name, filepath); - - let result = await store.head(name); - assert.equal(result.res.headers['content-type'], 'image/png'); - await store.multipartUpload(name, filepath); - result = await store.head(name); - assert.equal(result.res.headers['content-type'], 'image/png'); - }); - - it('should set user-specified mimetype', async () => { - const filepath = path.join(tmpdir, 'content-type-by-file.jpg'); - await createFile(filepath); - const name = `${prefix}oss-client/oss/content-type-by-file.png`; - await store.put(name, filepath, { mime: 'text/plain' }); - - let result = await store.head(name); - assert.equal(result.res.headers['content-type'], 'text/plain'); - await store.multipartUpload(name, filepath, { - mime: 'text/plain', - }); - result = await store.head(name); - assert.equal(result.res.headers['content-type'], 
'text/plain'); - }); - }); - - describe('head()', () => { - let name; - let resHeaders; - before(async () => { - name = `${prefix}oss-client/oss/head-meta.js`; - const object = await store.put(name, __filename, { - meta: { - uid: 1, - pid: '123', - slus: 'test.html', - }, - }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - resHeaders = object.res.headers; - }); - - it('should head not exists object throw NoSuchKeyError', async () => { - await assert.rejects( - async () => { - await store.head(`${name}not-exists`); - }, - err => { - assert.equal(err.name, 'NoSuchKeyError'); - assert.equal(err.status, 404); - assert.equal(typeof err.requestId, 'string'); - return true; - } - ); - }); - - it('should head exists object with If-Modified-Since < object modified time', async () => { - let lastYear = new Date(resHeaders.date); - lastYear.setFullYear(lastYear.getFullYear() - 1); - lastYear = lastYear.toGMTString(); - const info = await store.head(name, { - headers: { - 'If-Modified-Since': lastYear, - }, - }); - assert.equal(info.status, 200); - assert(info.meta); - }); - - it('should head exists object with If-Modified-Since = object modified time', async () => { - const info = await store.head(name, { - headers: { - 'If-Modified-Since': resHeaders.date, - }, - }); - assert.equal(info.status, 304); - assert.equal(info.meta, null); - }); - - it('should head exists object with If-Modified-Since > object modified time', async () => { - let nextYear = new Date(resHeaders.date); - nextYear.setFullYear(nextYear.getFullYear() + 1); - nextYear = nextYear.toGMTString(); - - const info = await store.head(name, { - headers: { - 'If-Modified-Since': nextYear, - }, - }); - assert.equal(info.status, 304); - assert.equal(info.meta, null); - }); - - it('should head exists object with If-Unmodified-Since < object modified time', async () => { - let lastYear = new Date(resHeaders.date); - lastYear.setFullYear(lastYear.getFullYear() - 1); - lastYear = 
lastYear.toGMTString(); - await assert.rejects( - async () => { - await store.head(name, { - headers: { - 'If-Unmodified-Since': lastYear, - }, - }); - }, - err => { - assert.equal(err.name, 'PreconditionFailedError'); - assert.equal(err.status, 412); - return true; - } - ); - }); - - it('should head exists object with If-Unmodified-Since = object modified time', async () => { - const info = await store.head(name, { - headers: { - 'If-Unmodified-Since': resHeaders.date, - }, - }); - assert.equal(info.status, 200); - assert(info.meta); - }); - - it('should head exists object with If-Unmodified-Since > object modified time', async () => { - let nextYear = new Date(resHeaders.date); - nextYear.setFullYear(nextYear.getFullYear() + 1); - nextYear = nextYear.toGMTString(); - - const info = await store.head(name, { - headers: { - 'If-Unmodified-Since': nextYear, - }, - }); - assert.equal(info.status, 200); - assert(info.meta); - }); - - it('should head exists object with If-Match equal etag', async () => { - const info = await store.head(name, { - headers: { - 'If-Match': resHeaders.etag, - }, - }); - assert.equal(info.meta.uid, '1'); - assert.equal(info.meta.pid, '123'); - assert.equal(info.meta.slus, 'test.html'); - assert.equal(info.status, 200); - }); - - it('should head exists object with If-Match not equal etag', async () => { - await assert.rejects( - async () => { - await store.head(name, { - headers: { - 'If-Match': '"foo-etag"', - }, - }); - }, - err => { - assert.equal(err.name, 'PreconditionFailedError'); - assert.equal(err.status, 412); - return true; - } - ); - }); - - it('should head exists object with If-None-Match equal etag', async () => { - const info = await store.head(name, { - headers: { - 'If-None-Match': resHeaders.etag, - }, - }); - assert.equal(info.meta, null); - assert.equal(info.status, 304); - }); - - it('should head exists object with If-None-Match not equal etag', async () => { - const info = await store.head(name, { - headers: { - 
'If-None-Match': '"foo-etag"', - }, - }); - assert.equal(info.meta.uid, '1'); - assert.equal(info.meta.pid, '123'); - assert.equal(info.meta.slus, 'test.html'); - assert.equal(info.status, 200); - }); - }); - - describe('getObjectMeta()', () => { - let name; - let resHeaders; - let fileSize; - before(async () => { - name = `${prefix}oss-client/oss/object-meta.js`; - const object = await store.put(name, __filename); - fileSize = fs.statSync(__filename).size; - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - resHeaders = object.res.headers; - }); - - it('should head not exists object throw NoSuchKeyError', async () => { - await assert.rejects( - async () => { - await store.head(`${name}not-exists`); - }, - err => { - assert.equal(err.name, 'NoSuchKeyError'); - assert.equal(err.status, 404); - assert.equal(typeof err.requestId, 'string'); - return true; - } - ); - }); - - it('should return Etag and Content-Length', async () => { - const info = await store.getObjectMeta(name); - assert.equal(info.status, 200); - assert.equal(info.res.headers.etag, resHeaders.etag); - assert.equal(info.res.headers['content-length'], fileSize); - }); - }); - - describe('get()', () => { - let name; - let resHeaders; - let needEscapeName; - before(async () => { - name = `${prefix}oss-client/oss/get-meta.js`; - let object = await store.put(name, __filename, { - meta: { - uid: 1, - pid: '123', - slus: 'test.html', - }, - }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - resHeaders = object.res.headers; - - needEscapeName = `${prefix}oss-client/oss/%3get+meta.js`; - object = await store.put(needEscapeName, __filename, { - meta: { - uid: 1, - pid: '123', - slus: 'test.html', - }, - }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - }); - - it('should store object to local file', async () => { - const savepath = path.join(tmpdir, name.replace(/\//g, '-')); - const result = await store.get(name, savepath); - 
assert.equal(result.res.status, 200); - assert(!result.res.requestUrls[0].includes('response-cache-control=no-cache')); - assert.equal(fs.statSync(savepath).size, fs.statSync(__filename).size); - }); - - it('should escape uri path ok', async () => { - const savepath = path.join(tmpdir, needEscapeName.replace(/\//g, '-')); - const result = await store.get(needEscapeName, savepath); - assert.equal(result.res.status, 200); - assert.equal(fs.statSync(savepath).size, fs.statSync(__filename).size); - }); - - it.skip('should throw error when save path parent dir not exists', async () => { - const savepath = path.join(tmpdir, 'not-exists', name.replace(/\//g, '-')); - await assert.rejects(async () => { - await store.get(name, savepath); - }, err => { - assert(err.message.includes('ENOENT')); - return true; - }); - }); - - it('should store object to writeStream', async () => { - const savepath = path.join(tmpdir, name.replace(/\//g, '-')); - const result = await store.get(name, fs.createWriteStream(savepath)); - assert.equal(result.res.status, 200); - assert.equal(fs.statSync(savepath).size, fs.statSync(__filename).size); - }); - - it('should store not exists object to file', async () => { - const savepath = path.join(tmpdir, name.replace(/\//g, '-')); - await assert.rejects( - async () => { - await store.get(`${name}not-exists`, savepath); - }, - err => { - assert.equal(err.name, 'NoSuchKeyError'); - assert.equal(err.status, 404); - assert(!fs.existsSync(savepath)); - return true; - } - ); - }); - - it.skip('should throw error when writeStream emit error', async () => { - const savepath = path.join(tmpdir, 'not-exists-dir', name.replace(/\//g, '-')); - await assert.rejects(async () => { - await store.get(name, fs.createWriteStream(savepath)); - }, err => { - console.error(err); - return true; - }); - }); - - it('should get object content buffer', async () => { - let result = await store.get(name); - assert(Buffer.isBuffer(result.content), 'content should be Buffer'); - 
assert(result.content.toString().indexOf('oss-client/oss/get-meta.js') > 0); - - result = await store.get(name, null); - assert(Buffer.isBuffer(result.content), 'content should be Buffer'); - assert(result.content.toString().indexOf('oss-client/oss/get-meta.js') > 0); - }); - - it('should get object content buffer with image process', async () => { - const imageName = `${prefix}oss-client/oss/nodejs-test-get-image-1024x768.png`; - const originImagePath = path.join(__dirname, 'nodejs-1024x768.png'); - path.join(__dirname, 'nodejs-processed-w200.png'); - await store.put(imageName, originImagePath, { - mime: 'image/png', - }); - - let result = await store.get(imageName, { process: 'image/resize,w_200' }); - assert.equal(result.res.status, 200); - assert(Buffer.isBuffer(result.content), 'content should be Buffer'); - // assert.deepEqual(result.content == fs.readFileSync(processedImagePath), - // 'get content should be same as test/nodejs-processed-w200.png'); - - // it should use the value of process - // when 'subres.x-oss-process' coexists with 'process'. 
- result = await store.get(imageName, { - process: 'image/resize,w_200', - subres: { 'x-oss-process': 'image/resize,w_100' }, - }); - assert.equal(result.res.status, 200); - assert(Buffer.isBuffer(result.content), 'content should be Buffer'); - }); - - it('should throw NoSuchKeyError when object not exists', async () => { - await assert.rejects( - async () => { - await store.get('not-exists-key'); - }, - err => { - assert.equal(err.name, 'NoSuchKeyError'); - assert.equal(err.status, 404); - assert.equal(typeof err.requestId, 'string'); - assert.equal(err.message, 'The specified key does not exist.'); - return true; - } - ); - }); - - describe('If-Modified-Since header', () => { - it('should 200 when If-Modified-Since < object modified time', async () => { - let lastYear = new Date(resHeaders.date); - lastYear.setFullYear(lastYear.getFullYear() - 1); - lastYear = lastYear.toGMTString(); - const result = await store.get(name, { - headers: { - 'If-Modified-Since': lastYear, - }, - }); - assert(Buffer.isBuffer(result.content), 'content should be Buffer'); - assert(result.content.toString().indexOf('oss-client/oss/get-meta.js') > 0); - assert.equal(result.res.status, 200); - }); - - it('should 304 when If-Modified-Since = object modified time', async () => { - const result = await store.get(name, { - headers: { - 'If-Modified-Since': resHeaders.date, - }, - }); - assert(Buffer.isBuffer(result.content), 'content should be Buffer'); - assert.equal(result.content.length, 0); - assert.equal(result.res.status, 304); - }); - - it('should 304 when If-Modified-Since > object modified time', async () => { - let nextYear = new Date(resHeaders.date); - nextYear.setFullYear(nextYear.getFullYear() + 1); - nextYear = nextYear.toGMTString(); - const result = await store.get(name, { - headers: { - 'If-Modified-Since': nextYear, - }, - }); - assert(Buffer.isBuffer(result.content), 'content should be Buffer'); - assert.equal(result.content.length, 0); - assert.equal(result.res.status, 
304); - }); - }); - - describe('If-Unmodified-Since header', () => { - it('should throw PreconditionFailedError when If-Unmodified-Since < object modified time', async () => { - let lastYear = new Date(resHeaders.date); - lastYear.setFullYear(lastYear.getFullYear() - 1); - lastYear = lastYear.toGMTString(); - await assert.rejects( - async () => { - await store.get(name, { - headers: { - 'If-Unmodified-Since': lastYear, - }, - }); - }, - err => { - assert.equal(err.status, 412); - assert.equal(err.name, 'PreconditionFailedError'); - assert.equal( - err.message, - 'At least one of the pre-conditions you specified did not hold. (condition: If-Unmodified-Since)' - ); - assert.equal(typeof err.requestId, 'string'); - assert.equal(typeof err.hostId, 'string'); - return true; - } - ); - }); - - it('should 200 when If-Unmodified-Since = object modified time', async () => { - const result = await store.get(name, { - headers: { - 'If-Unmodified-Since': resHeaders.date, - }, - }); - assert.equal(result.res.status, 200); - assert(Buffer.isBuffer(result.content), 'content should be Buffer'); - assert(result.content.toString().indexOf('oss-client/oss/get-meta.js') > 0); - }); - - it('should 200 when If-Unmodified-Since > object modified time', async () => { - let nextYear = new Date(resHeaders.date); - nextYear.setFullYear(nextYear.getFullYear() + 1); - nextYear = nextYear.toGMTString(); - const result = await store.get(name, { - headers: { - 'If-Unmodified-Since': nextYear, - }, - }); - assert.equal(result.res.status, 200); - assert(Buffer.isBuffer(result.content), 'content should be Buffer'); - assert(result.content.toString().indexOf('oss-client/oss/get-meta.js') > 0); - }); - }); - - describe('If-Match header', () => { - it('should 200 when If-Match equal object etag', async () => { - const result = await store.get(name, { - headers: { - 'If-Match': resHeaders.etag, - }, - }); - assert.equal(result.res.status, 200); - }); - - it('should throw PreconditionFailedError when 
If-Match not equal object etag', async () => { - await assert.rejects( - async () => { - await store.get(name, { - headers: { - 'If-Match': 'foo', - }, - }); - }, - err => { - assert.equal(err.name, 'PreconditionFailedError'); - assert.equal(err.status, 412); - return true; - } - ); - }); - }); - - describe('If-None-Match header', () => { - it('should 200 when If-None-Match not equal object etag', async () => { - const result = await store.get(name, { - headers: { - 'If-None-Match': 'foo', - }, - }); - assert.equal(result.res.status, 200); - }); - - it('should 304 when If-None-Match equal object etag', async () => { - const result = await store.get(name, { - headers: { - 'If-None-Match': resHeaders.etag, - }, - }); - assert.equal(result.res.status, 304); - assert.equal(result.content.length, 0); - }); - }); - - describe('Range header', () => { - it('should work with Range header and get top 10 bytes content', async () => { - const content = Buffer.from('aaaaaaaaaabbbbbbbbbb'); - await store.put('range-header-test', content); - const result = await store.get('range-header-test', { - headers: { - Range: 'bytes=0-9', - }, - }); - assert.equal(result.res.headers['content-length'], '10'); - assert(Buffer.isBuffer(result.content), 'content should be Buffer'); - assert.equal(result.content.toString(), 'aaaaaaaaaa'); - }); - }); - }); - - describe('signatureUrl()', () => { - let name; - let needEscapeName; - before(async () => { - name = `${prefix}oss-client/oss/signatureUrl.js`; - let object = await store.put(name, __filename, { - meta: { - uid: 1, - pid: '123', - slus: 'test.html', - }, - }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - - needEscapeName = `${prefix}oss-client/oss/%3get+meta-signatureUrl.js`; - object = await store.put(needEscapeName, __filename, { - meta: { - uid: 1, - pid: '123', - slus: 'test.html', - }, - }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - }); - - it('should signature url get 
object ok', async () => { - const result = await store.get(name); - const url = store.signatureUrl(name); - const urlRes = await urllib.request(url); - assert.equal(urlRes.data.toString(), result.content.toString()); - }); - - it('should signature url with response limitation', () => { - const response = { - 'content-type': 'xml', - 'content-language': 'zh-cn', - }; - const url = store.signatureUrl(name, { response }); - assert(url.includes('response-content-type=xml')); - assert(url.includes('response-content-language=zh-cn')); - }); - - it('should signature url with options contains other parameters', async () => { - const options = { - expires: 3600, - subResource: { - 'x-oss-process': 'image/resize,w_200', - }, - // others parameters - filename: 'test.js', - testParameters: 'xxx', - }; - const imageName = `${prefix}oss-client/oss/nodejs-test-signature-1024x768.png`; - const originImagePath = path.join(__dirname, 'nodejs-1024x768.png'); - path.join(__dirname, 'nodejs-processed-w200.png'); - await store.put(imageName, originImagePath, { - mime: 'image/png', - }); - - const signUrl = store.signatureUrl(imageName, options); - const processedKeyword = 'x-oss-process=image%2Fresize%2Cw_200'; - assert.equal(signUrl.match(processedKeyword), processedKeyword); - const urlRes = await urllib.request(signUrl); - assert.equal(urlRes.status, 200); - }); - - it('should signature url with image processed and get object ok', async () => { - const imageName = `${prefix}oss-client/oss/nodejs-test-signature-1024x768.png`; - const originImagePath = path.join(__dirname, 'nodejs-1024x768.png'); - path.join(__dirname, 'nodejs-processed-w200.png'); - await store.put(imageName, originImagePath, { - mime: 'image/png', - }); - - const signUrl = store.signatureUrl(imageName, { expires: 3600, process: 'image/resize,w_200' }); - const processedKeyword = 'x-oss-process=image%2Fresize%2Cw_200'; - assert.equal(signUrl.match(processedKeyword), processedKeyword); - const urlRes = await 
urllib.request(signUrl); - assert.equal(urlRes.status, 200); - }); - - it('should signature url for PUT', async () => { - const putString = 'Hello World'; - const contentMd5 = crypto.createHash('md5').update(Buffer.from(putString, 'utf8')).digest('base64'); - const url = store.signatureUrl(name, { - method: 'PUT', - 'Content-Type': 'text/plain; charset=UTF-8', - 'Content-Md5': contentMd5, - }); - const headers = { - 'Content-Type': 'text/plain; charset=UTF-8', - 'Content-MD5': contentMd5, - }; - const res = await urllib.request(url, { method: 'PUT', data: putString, headers }); - assert.equal(res.status, 200); - const headRes = await store.head(name); - assert.equal(headRes.status, 200); - }); - - it('should signature url get need escape object ok', async () => { - const result = await store.get(needEscapeName); - const url = store.signatureUrl(needEscapeName); - const urlRes = await urllib.request(url); - assert.equal(urlRes.data.toString(), result.content.toString()); - }); - - it('should signature url with custom host ok', async () => { - const conf = {}; - copy(config).to(conf); - conf.endpoint = 'www.aliyun.com'; - conf.cname = true; - const tempStore = oss(conf); - - const url = tempStore.signatureUrl(name); - // http://www.aliyun.com/darwin-v4.4.2/oss-client/oss/get-meta.js?OSSAccessKeyId= - assert.equal(url.indexOf('http://www.aliyun.com/'), 0); - }); - - it('should signature url with traffic limit', async () => { - const limit_name = `${prefix}oss-client/oss/trafficLimit.js`; - - let url; - let result; - const file_1mb = path.join(tmpdir, 'bigfile-1mb.bin'); - await writeFile(file_1mb, Buffer.alloc(1 * 1024 * 1024).fill('a\n')); - - url = store.signatureUrl(limit_name, { - trafficLimit: 8 * 1024 * 100 * 4, - method: 'PUT', - }); - - result = await store.urllib.request(url, { - method: 'PUT', - stream: fs.createReadStream(file_1mb), - timeout: 600000, - }); - assert.strictEqual(200, result.status); - - url = store.signatureUrl(name, { - trafficLimit: 8 * 
1024 * 100 * 4, - }); - result = await store.urllib.request(url, { - timeout: 600000, - }); - assert.strictEqual(200, result.status); - }); - }); - - describe('getStream()', () => { - let name; - before(async () => { - name = `${prefix}oss-client/oss/get-stream.js`; - await store.put(name, __filename, { - meta: { - uid: 1, - pid: '123', - slus: 'test.html', - }, - }); - }); - - it('should get exists object stream', async () => { - await utils.sleep(ms(metaSyncTime)); - const result = await store.getStream(name); - assert.equal(result.res.status, 200); - assert(result.stream instanceof Readable); - const tmpfile = path.join(tmpdir, 'get-stream.js'); - const tmpstream = fs.createWriteStream(tmpfile); - - function finish() { - return new Promise(resolve => { - tmpstream.on('finish', () => { - resolve(); - }); - }); - } - - result.stream.pipe(tmpstream); - await finish(); - assert.equal(fs.readFileSync(tmpfile, 'utf8'), fs.readFileSync(__filename, 'utf8')); - }); - - /** - * Image processing uses different compression algorithms, - * and the performance may be inconsistent - * between different regions - */ - it('should get image stream with image process', async () => { - const imageName = `${prefix}oss-client/oss/nodejs-test-getstream-image-1024x768.png`; - const originImagePath = path.join(__dirname, 'nodejs-1024x768.png'); - // const processedImagePath = path.join(__dirname, 'nodejs-processed-w200.png'); - // const processedImagePath2 = path.join(__dirname, 'nodejs-processed-w200-latest.png'); - await store.put(imageName, originImagePath, { - mime: 'image/png', - }); - - let result = await store.getStream(imageName, { process: 'image/resize,w_200' }); - let result2 = await store.getStream(imageName, { process: 'image/resize,w_200' }); - assert.equal(result.res.status, 200); - assert.equal(result2.res.status, 200); - // let isEqual = await streamEqual(result.stream, fs.createReadStream(processedImagePath)); - // let isEqual2 = await streamEqual(result2.stream, 
fs.createReadStream(processedImagePath2)); - // assert(isEqual || isEqual2); - result = await store.getStream(imageName, { - process: 'image/resize,w_200', - subres: { 'x-oss-process': 'image/resize,w_100' }, - }); - result2 = await store.getStream(imageName, { - process: 'image/resize,w_200', - subres: { 'x-oss-process': 'image/resize,w_100' }, - }); - assert.equal(result.res.status, 200); - assert.equal(result2.res.status, 200); - // isEqual = await streamEqual(result.stream, fs.createReadStream(processedImagePath)); - // isEqual2 = await streamEqual(result2.stream, fs.createReadStream(processedImagePath2)); - // assert(isEqual || isEqual2); - }); - - it('should throw error when object not exists', async () => { - await assert.rejects(async () => { - await store.getStream(`${name}not-exists`); - }, err => { - assert.equal(err.name, 'NoSuchKeyError'); - return true; - }); - }); - }); - - describe('delete()', () => { - it('should delete exsits object', async () => { - const name = `${prefix}oss-client/oss/delete.js`; - await store.put(name, __filename); - - const info = await store.delete(name); - assert.equal(info.res.status, 204); - - await utils.throws(async () => { - await store.head(name); - }, 'NoSuchKeyError'); - }); - - it('should delete not exists object', async () => { - const info = await store.delete('not-exists-name'); - assert.equal(info.res.status, 204); - }); - }); - - describe('deleteMulti()', () => { - const names = []; - beforeEach(async () => { - let name = `${prefix}oss-client/oss/deleteMulti0.js`; - names.push(name); - await store.put(name, __filename); - - name = `${prefix}oss-client/oss/deleteMulti1.js`; - names.push(name); - await store.put(name, __filename); - - name = `${prefix}oss-client/oss/deleteMulti2.js`; - names.push(name); - await store.put(name, __filename); - }); - - it('should delete 3 exists objs', async () => { - const result = await store.deleteMulti(names); - assert.deepEqual( - result.deleted.map(v => v.Key), - names - ); - 
assert.equal(result.res.status, 200); - }); - - it('should delete 2 exists and 2 not exists objs', async () => { - const result = await store.deleteMulti(names.slice(0, 2).concat([ 'not-exist1', 'not-exist2' ])); - assert.deepEqual( - result.deleted.map(v => v.Key), - names.slice(0, 2).concat([ 'not-exist1', 'not-exist2' ]) - ); - assert.equal(result.res.status, 200); - }); - - it('should delete 1 exists objs', async () => { - const result = await store.deleteMulti(names.slice(0, 1)); - assert.deepEqual( - result.deleted.map(v => v.Key), - names.slice(0, 1) - ); - assert.equal(result.res.status, 200); - }); - - it('should delete in quiet mode', async () => { - const result = await store.deleteMulti(names, { - quiet: true, - }); - assert(result.deleted.length === 0); - assert.equal(result.res.status, 200); - }); - }); - - describe.skip('copy()', () => { - let name; - let resHeaders; - let otherBucket; - let otherBucketObject; - before(async () => { - name = `${prefix}oss-client/oss/copy-meta.js`; - const object = await store.put(name, __filename, { - meta: { - uid: 1, - pid: '123', - slus: 'test.html', - }, - }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - resHeaders = object.res.headers; - - otherBucket = `oss-client-copy-source-bucket-${prefix.replace(/[/.]/g, '-')}`; - otherBucket = otherBucket.substring(0, otherBucket.length - 1); - await store.putBucket(otherBucket); - store.useBucket(otherBucket); - otherBucketObject = `${prefix}oss-client/oss/copy-source.js`; - await store.put(otherBucketObject, __filename); - store.useBucket(bucket); - }); - - after(async () => { - await utils.cleanBucket(store, otherBucket); - store.useBucket(bucket); - }); - - it('should copy object from same bucket', async () => { - const originname = `${prefix}oss-client/oss/copy-new.js`; - const result = await store.copy(originname, name); - assert.equal(result.res.status, 200); - assert.equal(typeof result.data.etag, 'string'); - assert.equal(typeof 
result.data.lastModified, 'string'); - - const info = await store.head(originname); - assert.equal(info.meta.uid, '1'); - assert.equal(info.meta.pid, '123'); - assert.equal(info.meta.slus, 'test.html'); - assert.equal(info.status, 200); - }); - - it('should copy object from same bucket and set content-disposition', async () => { - const originname = `${prefix}oss-client/oss/copy-content-disposition.js`; - const disposition = 'attachment; filename=test'; - const result = await store.copy(originname, name, { - headers: { - 'Content-Disposition': disposition, - }, - }); - assert.strictEqual(result.res.status, 200); - const { res } = await store.get(originname); - assert.strictEqual(res.headers['content-disposition'], disposition); - }); - - it('should copy object from other bucket, sourceBucket in copySource', async () => { - const copySource = `/${otherBucket}/${otherBucketObject}`; - const copyTarget = `${prefix}oss-client/oss/copy-target.js`; - const result = await store.copy(copyTarget, copySource); - assert.equal(result.res.status, 200); - - const info = await store.head(copyTarget); - assert.equal(info.status, 200); - }); - - it('should copy object from other bucket, sourceBucket is a separate parameter', async () => { - const copySource = otherBucketObject; - const copyTarget = `${prefix}oss-client/oss/has-bucket-name-copy-target.js`; - const result = await store.copy(copyTarget, copySource, otherBucket); - assert.equal(result.res.status, 200); - - const info = await store.head(copyTarget); - assert.equal(info.status, 200); - }); - - it('should copy object with non-english name', async () => { - const sourceName = `${prefix}oss-client/oss/copy-meta_测试.js`; - let result = await store.put(sourceName, __filename, { - meta: { - uid: 2, - pid: '1234', - slus: 'test1.html', - }, - }); - - const originname = `${prefix}oss-client/oss/copy-new_测试.js`; - result = await store.copy(originname, sourceName); - assert.equal(result.res.status, 200); - assert.equal(typeof 
result.data.etag, 'string'); - assert.equal(typeof result.data.lastModified, 'string'); - - const info = await store.head(originname); - assert.equal(info.meta.uid, '2'); - assert.equal(info.meta.pid, '1234'); - assert.equal(info.meta.slus, 'test1.html'); - assert.equal(info.status, 200); - }); - - it('should copy object with non-english name and bucket', async () => { - let sourceName = `${prefix}oss-client/oss/copy-meta_测试2.js`; - let result = await store.put(sourceName, __filename, { - meta: { - uid: 3, - pid: '12345', - slus: 'test2.html', - }, - }); - - let info = await store.head(sourceName); - assert.equal(info.meta.uid, '3'); - assert.equal(info.meta.pid, '12345'); - assert.equal(info.meta.slus, 'test2.html'); - assert.equal(info.status, 200); - - sourceName = `/${bucket}/${sourceName}`; - const originname = `${prefix}oss-client/oss/copy-new_测试2.js`; - result = await store.copy(originname, sourceName); - assert.equal(result.res.status, 200); - assert.equal(typeof result.data.etag, 'string'); - assert.equal(typeof result.data.lastModified, 'string'); - - info = await store.head(originname); - assert.equal(info.meta.uid, '3'); - assert.equal(info.meta.pid, '12345'); - assert.equal(info.meta.slus, 'test2.html'); - assert.equal(info.status, 200); - }); - - it('should copy object and set other meta', async () => { - const originname = `${prefix}oss-client/oss/copy-new-2.js`; - const result = await store.copy(originname, name, { - meta: { - uid: '2', - }, - }); - assert.equal(result.res.status, 200); - assert.equal(typeof result.data.etag, 'string'); - assert.equal(typeof result.data.lastModified, 'string'); - - const info = await store.head(originname); - assert.equal(info.meta.uid, '2'); - assert(!info.meta.pid); - assert(!info.meta.slus); - assert.equal(info.status, 200); - }); - - it('should copy object with special characters such as ;,/?:@&=+$#', async () => { - const sourceName = `${prefix}oss-client/oss/copy-a;,/?:@&=+$#b.js`; - const tempFile = await 
utils.createTempFile('t', 1024 * 1024); - await store.put(sourceName, tempFile); - await store.copy(`${prefix}oss-client/oss/copy-a.js`, sourceName); - await store.copy(`${prefix}oss-client/oss/copy-a+b.js`, sourceName); - }); - - it('should use copy to change exists object headers', async () => { - const originname = `${prefix}oss-client/oss/copy-new-3.js`; - let result = await store.copy(originname, name); - assert.equal(result.res.status, 200); - assert.equal(typeof result.data.etag, 'string'); - assert.equal(typeof result.data.lastModified, 'string'); - let info = await store.head(originname); - assert(!info.res.headers['cache-control']); - - // add Cache-Control header to a exists object - result = await store.copy(originname, originname, { - headers: { - 'Cache-Control': 'max-age=0, s-maxage=86400', - }, - }); - assert.equal(result.res.status, 200); - assert.equal(typeof result.data.etag, 'string'); - assert.equal(typeof result.data.lastModified, 'string'); - info = await store.head(originname); - assert.equal(info.res.headers['cache-control'], 'max-age=0, s-maxage=86400'); - }); - - it('should throw NoSuchKeyError when source object not exists', async () => { - await utils.throws( - async () => { - await store.copy('new-object', 'not-exists-object'); - }, - err => { - assert.equal(err.name, 'NoSuchKeyError'); - assert.equal(err.message, 'The specified key does not exist.'); - assert.equal(err.status, 404); - } - ); - }); - - describe('If-Match header', () => { - it('should throw PreconditionFailedError when If-Match not equal source object etag', async () => { - await assert.rejects( - async () => { - await store.copy('new-name', name, { - headers: { - 'If-Match': 'foo-bar', - }, - }); - }, - err => { - assert.equal(err.name, 'PreconditionFailedError'); - assert.equal( - err.message, - 'At least one of the pre-conditions you specified did not hold. 
(condition: If-Match)' - ); - assert.equal(err.status, 412); - return true; - } - ); - }); - - it('should copy object when If-Match equal source object etag', async () => { - const originname = `${prefix}oss-client/oss/copy-new-If-Match.js`; - const result = await store.copy(originname, name, { - headers: { - 'If-Match': resHeaders.etag, - }, - }); - assert.equal(result.res.status, 200); - assert.equal(typeof result.data.etag, 'string'); - assert.equal(typeof result.data.lastModified, 'string'); - }); - }); - - describe('If-None-Match header', () => { - it('should return 304 when If-None-Match equal source object etag', async () => { - const result = await store.copy('new-name', name, { - headers: { - 'If-None-Match': resHeaders.etag, - }, - }); - assert.equal(result.res.status, 304); - assert.equal(result.data, null); - }); - - it('should copy object when If-None-Match not equal source object etag', async () => { - const originname = `${prefix}oss-client/oss/copy-new-If-None-Match.js`; - const result = await store.copy(originname, name, { - headers: { - 'If-None-Match': 'foo-bar', - }, - }); - assert.equal(result.res.status, 200); - assert.equal(typeof result.data.etag, 'string'); - assert.equal(typeof result.data.lastModified, 'string'); - }); - }); - - describe('If-Modified-Since header', () => { - it('should 304 when If-Modified-Since > source object modified time', async () => { - const originname = `${prefix}oss-client/oss/copy-new-If-Modified-Since.js`; - let nextYear = new Date(resHeaders.date); - nextYear.setFullYear(nextYear.getFullYear() + 1); - nextYear = nextYear.toGMTString(); - const result = await store.copy(originname, name, { - headers: { - 'If-Modified-Since': nextYear, - }, - }); - assert.equal(result.res.status, 304); - }); - - it('should 304 when If-Modified-Since >= source object modified time', async () => { - const originname = `${prefix}oss-client/oss/copy-new-If-Modified-Since.js`; - const result = await store.copy(originname, name, { - 
headers: { - 'If-Modified-Since': resHeaders.date, - }, - }); - assert.equal(result.res.status, 304); - }); - - it('should 200 when If-Modified-Since < source object modified time', async () => { - const originname = `${prefix}oss-client/oss/copy-new-If-Modified-Since.js`; - let lastYear = new Date(resHeaders.date); - lastYear.setFullYear(lastYear.getFullYear() - 1); - lastYear = lastYear.toGMTString(); - const result = await store.copy(originname, name, { - headers: { - 'If-Modified-Since': lastYear, - }, - }); - assert.equal(result.res.status, 200); - }); - }); - - describe('If-Unmodified-Since header', () => { - it('should 200 when If-Unmodified-Since > source object modified time', async () => { - const originname = `${prefix}oss-client/oss/copy-new-If-Unmodified-Since.js`; - let nextYear = new Date(resHeaders.date); - nextYear.setFullYear(nextYear.getFullYear() + 1); - nextYear = nextYear.toGMTString(); - const result = await store.copy(originname, name, { - headers: { - 'If-Unmodified-Since': nextYear, - }, - }); - assert.equal(result.res.status, 200); - }); - - it('should 200 when If-Unmodified-Since >= source object modified time', async () => { - const originname = `${prefix}oss-client/oss/copy-new-If-Unmodified-Since.js`; - const result = await store.copy(originname, name, { - headers: { - 'If-Unmodified-Since': resHeaders.date, - }, - }); - assert.equal(result.res.status, 200); - }); - - it('should throw PreconditionFailedError when If-Unmodified-Since < source object modified time', async () => { - const originname = `${prefix}oss-client/oss/copy-new-If-Unmodified-Since.js`; - let lastYear = new Date(resHeaders.date); - lastYear.setFullYear(lastYear.getFullYear() - 1); - lastYear = lastYear.toGMTString(); - await assert.rejects( - async () => { - await store.copy(originname, name, { - headers: { - 'If-Unmodified-Since': lastYear, - }, - }); - }, - err => { - assert.equal(err.name, 'PreconditionFailedError'); - assert.equal( - err.message, - 'At least 
one of the pre-conditions you specified did not hold. (condition: If-Unmodified-Since)' - ); - assert.equal(err.status, 412); - return true; - } - ); - }); - }); - }); - - describe('putMeta()', () => { - let name; - before(async () => { - name = `${prefix}oss-client/oss/putMeta.js`; - const object = await store.put(name, __filename, { - meta: { - uid: 1, - pid: '123', - slus: 'test.html', - }, - }); - assert.equal(typeof object.res.headers['x-oss-request-id'], 'string'); - }); - - it('should update exists object meta', async () => { - await store.putMeta(name, { - uid: '2', - }); - const info = await store.head(name); - assert.equal(info.meta.uid, '2'); - assert(!info.meta.pid); - assert(!info.meta.slus); - }); - - it('should throw NoSuchKeyError when update not exists object meta', async () => { - await assert.rejects( - async () => { - await store.putMeta(`${name}not-exists`, { - uid: '2', - }); - }, - err => { - assert.equal(err.name, 'NoSuchKeyError'); - assert.equal(err.status, 404); - return true; - } - ); - }); - }); - - describe('list()', () => { - // oss.jpg - // fun/test.jpg - // fun/movie/001.avi - // fun/movie/007.avi - let listPrefix; - before(async () => { - listPrefix = `${prefix}oss-client/list/`; - await store.put(`${listPrefix}oss.jpg`, Buffer.from('oss.jpg')); - await store.put(`${listPrefix}fun/test.jpg`, Buffer.from('fun/test.jpg')); - await store.put(`${listPrefix}fun/movie/001.avi`, Buffer.from('fun/movie/001.avi')); - await store.put(`${listPrefix}fun/movie/007.avi`, Buffer.from('fun/movie/007.avi')); - await store.put(`${listPrefix}other/movie/007.avi`, Buffer.from('other/movie/007.avi')); - await store.put(`${listPrefix}other/movie/008.avi`, Buffer.from('other/movie/008.avi')); - }); - - function checkObjectProperties(obj) { - assert.equal(typeof obj.name, 'string'); - assert.equal(typeof obj.lastModified, 'string'); - assert.equal(typeof obj.etag, 'string'); - assert(obj.type === 'Normal' || obj.type === 'Multipart'); - 
assert.equal(typeof obj.size, 'number'); - assert.equal(obj.storageClass, 'Standard'); - assert.equal(typeof obj.owner, 'object'); - assert.equal(typeof obj.owner.id, 'string'); - assert.equal(typeof obj.owner.displayName, 'string'); - } - - it('should list only 1 object', async () => { - const result = await store.list({ - 'max-keys': 1, - }); - assert(result.objects.length <= 1); - result.objects.map(checkObjectProperties); - assert.equal(typeof result.nextMarker, 'string'); - assert(result.isTruncated); - assert.equal(result.prefixes, null); - }); - - it('should list top 3 objects', async () => { - const result = await store.list({ - 'max-keys': 3, - }); - assert(result.objects.length <= 3); - result.objects.map(checkObjectProperties); - assert.equal(typeof result.nextMarker, 'string'); - assert(result.isTruncated); - assert.equal(result.prefixes, null); - - // next 2 - const result2 = await store.list({ - 'max-keys': 2, - marker: result.nextMarker, - }); - assert(result2.objects.length <= 2); - result.objects.map(checkObjectProperties); - assert.equal(typeof result2.nextMarker, 'string'); - assert(result2.isTruncated); - assert.equal(result2.prefixes, null); - }); - - it('should list with prefix', async () => { - let result = await store.list({ - prefix: `${listPrefix}fun/movie/`, - }); - assert.equal(result.objects.length, 2); - result.objects.map(checkObjectProperties); - assert.equal(result.nextMarker, null); - assert(!result.isTruncated); - assert.equal(result.prefixes, null); - - result = await store.list({ - prefix: `${listPrefix}fun/movie`, - }); - assert.equal(result.objects.length, 2); - result.objects.map(checkObjectProperties); - assert.equal(result.nextMarker, null); - assert(!result.isTruncated); - assert.equal(result.prefixes, null); - }); - - it('should list current dir files only', async () => { - let result = await store.list({ - prefix: listPrefix, - delimiter: '/', - }); - assert.equal(result.objects.length, 1); - 
result.objects.map(checkObjectProperties); - assert.equal(result.nextMarker, null); - assert(!result.isTruncated); - assert.deepEqual(result.prefixes, [ `${listPrefix}fun/`, `${listPrefix}other/` ]); - - result = await store.list({ - prefix: `${listPrefix}fun/`, - delimiter: '/', - }); - assert.equal(result.objects.length, 1); - result.objects.map(checkObjectProperties); - assert.equal(result.nextMarker, null); - assert(!result.isTruncated); - assert.deepEqual(result.prefixes, [ `${listPrefix}fun/movie/` ]); - - result = await store.list({ - prefix: `${listPrefix}fun/movie/`, - delimiter: '/', - }); - assert.equal(result.objects.length, 2); - result.objects.map(checkObjectProperties); - assert.equal(result.nextMarker, null); - assert(!result.isTruncated); - assert.equal(result.prefixes, null); - }); - }); - - describe('listV2()', () => { - let listPrefix; - before(async () => { - listPrefix = `${prefix}oss-client/listV2/`; - await store.put(`${listPrefix}oss.jpg`, Buffer.from('oss.jpg')); - await store.put(`${listPrefix}fun/test.jpg`, Buffer.from('fun/test.jpg')); - await store.put(`${listPrefix}fun/movie/001.avi`, Buffer.from('fun/movie/001.avi')); - await store.put(`${listPrefix}fun/movie/007.avi`, Buffer.from('fun/movie/007.avi')); - await store.put(`${listPrefix}other/movie/007.avi`, Buffer.from('other/movie/007.avi')); - await store.put(`${listPrefix}other/movie/008.avi`, Buffer.from('other/movie/008.avi')); - }); - - function checkObjectProperties(obj, options) { - assert.equal(typeof obj.name, 'string'); - assert.equal(typeof obj.lastModified, 'string'); - assert.equal(typeof obj.etag, 'string'); - assert(obj.type === 'Normal' || obj.type === 'Multipart'); - assert.equal(typeof obj.size, 'number'); - assert.equal(obj.storageClass, 'Standard'); - if (options.owner) { - assert(typeof obj.owner.id === 'string' && typeof obj.owner.displayName === 'string'); - } else { - assert(obj.owner === null); - } - } - - it('should list top 3 objects', async () => { - const 
result = await store.listV2({ - 'max-keys': 1, - }); - assert(result.objects.length <= 1); - result.objects.forEach(checkObjectProperties); - assert.equal(typeof result.nextContinuationToken, 'string'); - assert(result.isTruncated); - assert.equal(result.prefixes, null); - - // next 2 - const result2 = await store.listV2({ - 'max-keys': 2, - continuationToken: result.nextContinuationToken, - }); - assert(result2.objects.length <= 2); - result.objects.forEach(checkObjectProperties); - assert.equal(typeof result2.nextContinuationToken, 'string'); - assert(result2.isTruncated); - assert.equal(result2.prefixes, null); - }); - - it('should list with prefix', async () => { - let result = await store.listV2({ - prefix: `${listPrefix}fun/movie/`, - 'fetch-owner': true, - }); - assert.equal(result.objects.length, 2); - result.objects.forEach(obj => checkObjectProperties(obj, { owner: true })); - assert.equal(result.nextContinuationToken, null); - assert(!result.isTruncated); - assert.equal(result.prefixes, null); - - result = await store.listV2({ - prefix: `${listPrefix}fun/movie`, - }); - assert.equal(result.objects.length, 2); - result.objects.forEach(checkObjectProperties); - assert.equal(result.nextContinuationToken, null); - assert(!result.isTruncated); - assert.equal(result.prefixes, null); - }); - - it('should list current dir files only', async () => { - let result = await store.listV2({ - prefix: listPrefix, - delimiter: '/', - }); - assert.equal(result.objects.length, 1); - result.objects.forEach(checkObjectProperties); - assert.equal(result.nextContinuationToken, null); - assert(!result.isTruncated); - assert.deepEqual(result.prefixes, [ `${listPrefix}fun/`, `${listPrefix}other/` ]); - - result = await store.listV2({ - prefix: `${listPrefix}fun/`, - delimiter: '/', - }); - assert.equal(result.objects.length, 1); - result.objects.forEach(checkObjectProperties); - assert.equal(result.nextContinuationToken, null); - assert(!result.isTruncated); - 
assert.deepEqual(result.prefixes, [ `${listPrefix}fun/movie/` ]); - - result = await store.listV2({ - prefix: `${listPrefix}fun/movie/`, - delimiter: '/', - }); - assert.equal(result.objects.length, 2); - result.objects.forEach(checkObjectProperties); - assert.equal(result.nextContinuationToken, null); - assert(!result.isTruncated); - assert.equal(result.prefixes, null); - }); - - it('should list with start-after', async () => { - let result = await store.listV2({ - 'start-after': `${listPrefix}fun`, - 'max-keys': 1, - }); - assert(result.objects[0].name === `${listPrefix}fun/movie/001.avi`); - - result = await store.listV2({ - 'start-after': `${listPrefix}fun/movie/001.avi`, - 'max-keys': 1, - }); - assert(result.objects[0].name === `${listPrefix}fun/movie/007.avi`); - - result = await store.listV2({ - delimiter: '/', - prefix: `${listPrefix}fun/movie/`, - 'start-after': `${listPrefix}fun/movie/002.avi`, - }); - assert(result.objects.length === 1); - assert(result.objects[0].name === `${listPrefix}fun/movie/007.avi`); - - result = await store.listV2({ - prefix: `${listPrefix}`, - 'max-keys': 5, - 'start-after': `${listPrefix}a`, - delimiter: '/', - }); - assert.strictEqual(result.keyCount, 3); - assert.strictEqual(result.objects.length, 1); - assert.strictEqual(result.objects[0].name, `${listPrefix}oss.jpg`); - assert.strictEqual(result.prefixes.length, 2); - assert.strictEqual(result.prefixes[0], `${listPrefix}fun/`); - assert.strictEqual(result.prefixes[1], `${listPrefix}other/`); - - result = await store.listV2({ - prefix: `${listPrefix}`, - 'max-keys': 5, - 'start-after': `${listPrefix}oss.jpg`, - delimiter: '/', - }); - assert.strictEqual(result.keyCount, 1); - assert.strictEqual(result.objects.length, 0); - assert.strictEqual(result.prefixes[0], `${listPrefix}other/`); - }); - - it('should list with continuation-token', async () => { - let nextContinuationToken = null; - let keyCount = 0; - do { - // eslint-disable-next-line no-await-in-loop - const result = 
await store.listV2({ - prefix: listPrefix, - 'max-keys': 2, - 'continuation-token': nextContinuationToken, - }); - keyCount += result.keyCount; - nextContinuationToken = result.nextContinuationToken; - } while (nextContinuationToken); - assert.strictEqual(keyCount, 6); - }); - }); - - describe('putACL(), getACL()', () => { - it('should put and get object ACL', async () => { - const name = `${prefix}object/acl`; - let result = await store.put(name, Buffer.from('hello world')); - assert.equal(result.res.status, 200); - - result = await store.getACL(name); - assert.equal(result.res.status, 200); - assert.equal(result.acl, 'default'); - - result = await store.putACL(name, 'public-read'); - assert.equal(result.res.status, 200); - - result = await store.getACL(name); - assert.equal(result.res.status, 200); - assert.equal(result.acl, 'public-read'); - - result = await store.get(name); - assert.equal(result.res.status, 200); - assert.deepEqual(result.content, Buffer.from('hello world')); - }); - }); - - describe('append()', () => { - const name = `/${prefix}oss-client/oss/apend${Date.now()}`; - afterEach(async () => { - await store.delete(name); - }); - - it('should apend object with content buffer', async () => { - let object = await store.append(name, Buffer.from('foo')); - assert(object.res.status === 200); - assert(object.nextAppendPosition === '3'); - assert(object.res.headers['x-oss-next-append-position'] === '3'); - - let res = await store.get(name); - assert(res.content.toString() === 'foo'); - assert(res.res.headers['x-oss-next-append-position'] === '3'); - - object = await store.append(name, Buffer.from('bar'), { - position: 3, - }); - assert(object.res.status === 200); - assert(object.nextAppendPosition === '6'); - assert(object.res.headers['x-oss-next-append-position'] === '6'); - - res = await store.get(name); - assert(res.content.toString() === 'foobar'); - assert(res.res.headers['x-oss-next-append-position'] === '6'); - }); - - it('should apend object with 
local file path', async () => { - const file = path.join(__dirname, 'fixtures/foo.js'); - let object = await store.append(name, file); - assert(object.nextAppendPosition === '16'); - - object = await store.append(name, file, { position: 16 }); - assert(object.nextAppendPosition === '32'); - }); - - it('should apend object with readstream', async () => { - const file = path.join(__dirname, 'fixtures/foo.js'); - let object = await store.append(name, fs.createReadStream(file)); - assert(object.nextAppendPosition === '16'); - - object = await store.append(name, fs.createReadStream(file), { - position: 16, - }); - assert(object.nextAppendPosition === '32'); - }); - - it('should error when positio not match', async () => { - await store.append(name, Buffer.from('foo')); - - try { - await store.append(name, Buffer.from('foo')); - throw new Error('should not run'); - } catch (err) { - assert(err.message === 'Position is not equal to file length'); - assert(err.name === 'PositionNotEqualToLengthError'); - } - }); - - it('should use nextAppendPosition to append next', async () => { - let object = await store.append(name, Buffer.from('foo')); - assert(object.nextAppendPosition === '3'); - - object = await store.append(name, Buffer.from('bar'), { - position: object.nextAppendPosition, - }); - - object = await store.append(name, Buffer.from('baz'), { - position: object.nextAppendPosition, - }); - - const res = await store.get(name); - assert(res.content.toString() === 'foobarbaz'); - assert(res.res.headers['x-oss-next-append-position'] === '9'); - }); - }); - - describe.skip('restore()', () => { - before(async () => { - await store.put('/oss/coldRestore.js', __filename, { - headers: { - 'x-oss-storage-class': 'ColdArchive', - }, - }); - await store.put('/oss/daysRestore.js', __filename, { - headers: { - 'x-oss-storage-class': 'ColdArchive', - }, - }); - }); - after(async () => { - await store.useBucket(bucket); - }); - - it('Should return OperationNotSupportedError when the 
type of bucket is not archive', async () => { - const name = '/oss/restore.js'; - await store.put(name, __filename); - - try { - await store.restore(name); - throw new Error('should not run this'); - } catch (err) { - assert.equal(err.status, 400); - } - }); - - // it.skip('Should return 202 when restore is called first', async () => { - // store.setBucket(archvieBucket); - // const name = '/oss/restore.js'; - // await store.put(name, __filename); - - // const info = await store.restore(name); - // assert.equal(info.res.status, 202); - - // // in 1 minute verify RestoreAlreadyInProgressError - // try { - // await store.restore(name); - // } catch (err) { - // assert.equal(err.name, 'RestoreAlreadyInProgressError'); - // } - // }); - - // it.skip('Category should be Archive', async () => { - // const name = '/oss/restore.js'; - // try { - // await store.restore(name, { type: 'ColdArchive' }); - // } catch (err) { - // assert.equal(err.code, 'MalformedXML'); - // } - // await store.useBucket(bucket); - // }); - - it('ColdArchive choice Days', async () => { - const name = '/oss/daysRestore.js'; - const result = await store.restore(name, { - type: 'ColdArchive', - Days: 2, - }); - assert.equal( - [ 'Expedited', 'Standard', 'Bulk' ].includes(result.res.headers['x-oss-object-restore-priority']), - true - ); - }); - - it('ColdArchive is Accepted', async () => { - const name = '/oss/coldRestore.js'; - const result = await store.restore(name, { - type: 'ColdArchive', - }); - assert.equal( - [ 'Expedited', 'Standard', 'Bulk' ].includes(result.res.headers['x-oss-object-restore-priority']), - true - ); - }); - }); - - describe('symlink()', () => { - it('Should put and get Symlink', async () => { - const targetName = '/oss/target-测试.js'; - const name = '/oss/symlink-软链接.js'; - let result = await store.put(targetName, __filename); - assert.equal(result.res.status, 200); - - result = await store.putSymlink(name, targetName, { - storageClass: 'IA', - meta: { - uid: '1', - slus: 
'test.html', - }, - }); - assert.equal(result.res.status, 200); - - result = await store.getSymlink(name); - assert.equal(result.res.status, 200); - assert.equal(result.targetName, store._objectName(targetName)); - - result = await store.head(name); - - assert.equal(result.res.status, 200); - assert.equal(result.res.headers['x-oss-object-type'], 'Symlink'); - assert.deepEqual(result.meta, { - uid: '1', - slus: 'test.html', - }); - // TODO getObjectMeta should return storage class, - // headObject return targetObject storage class - // result = await store.getObjectMeta(name); - // console.log(result); - }); - }); - - describe('calculatePostSignature()', () => { - it('should get signature for postObject', async () => { - // not work on Node.js 14 with undici - if (process.version.startsWith('v14.')) return; - const name = 'calculatePostSignature.js'; - const url = store.generateObjectUrl(name).replace(name, ''); - const date = new Date(); - date.setDate(date.getDate() + 1); - const policy = { - expiration: date.toISOString(), - conditions: [{ bucket: store.options.bucket }], - }; - - const params = store.calculatePostSignature(policy); - const options = { - method: 'POST', - data: { - ...params, - key: name, - }, - files: { - file: fs.createReadStream(__filename), - }, - }; - - const result = await urllib.request(url, options); - assert(result.statusCode === 204); - const headRes = await store.head(name); - assert.equal(headRes.status, 200); - // console.log(headRes.res.headers); - }); - - it('should throw error when policy is not JSON or Object', async () => { - let policy = 'string'; - const errorMessage = 'policy must be JSON string or Object'; - try { - store.calculatePostSignature(policy); - assert(false); - } catch (error) { - assert.strictEqual(errorMessage, error.message); - } - try { - policy = 123; - store.calculatePostSignature(policy); - assert(false); - } catch (error) { - assert.strictEqual(errorMessage, error.message); - } - }); - }); - - 
describe('getObjectTagging() putObjectTagging() deleteObjectTagging()', () => { - const name = '/oss/tagging.js'; - - before(async () => { - await store.put(name, __filename); - }); - - it('should get the tags of object', async () => { - const result = await store.getObjectTagging(name); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, {}); - }); - - it('should configures or updates the tags of object', async () => { - let result; - let tag = { a: '1', b: '2' }; - result = await store.putObjectTagging(name, tag); - assert.strictEqual(result.status, 200); - - result = await store.getObjectTagging(name); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, tag); - - tag = { a: '1' }; - result = await store.putObjectTagging(name, tag); - assert.strictEqual(result.status, 200); - - result = await store.getObjectTagging(name); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, tag); - }); - - it('maximum of 10 tags for a object', async () => { - await assert.rejects(async () => { - const tag = {}; - Array(11) - .fill(1) - .forEach((_, index) => { - tag[index] = index; - }); - await store.putObjectTagging(name, tag); - }, err => { - assert.strictEqual('maximum of 10 tags for a object', err.message); - return true; - }); - }); - - it('tag can contain invalid string', async () => { - await assert.rejects(async () => { - const errorStr = '错误字符串@#¥%……&*!'; - const key = errorStr; - const value = errorStr; - const tag = { [key]: value }; - await store.putObjectTagging(name, tag); - }, err => { - assert.strictEqual( - 'tag can contain letters, numbers, spaces, and the following symbols: plus sign (+), hyphen (-), equal sign (=), period (.), underscore (_), colon (:), and forward slash (/)', - err.message); - return true; - }); - }); - - it('tag key can be a maximum of 128 bytes in length', async () => { - await assert.rejects(async () => { - const key = new Array(129).fill('1').join(''); - const tag = { [key]: 
'1' }; - await store.putObjectTagging(name, tag); - }, err => { - assert.strictEqual('tag key can be a maximum of 128 bytes in length', err.message); - return true; - }); - }); - - it('tag value can be a maximum of 256 bytes in length', async () => { - await assert.rejects(async () => { - const value = new Array(257).fill('1').join(''); - const tag = { a: value }; - await store.putObjectTagging(name, tag); - }, err => { - assert.strictEqual('tag value can be a maximum of 256 bytes in length', err.message); - return true; - }); - }); - - it('should throw error when the type of tag is not Object', async () => { - await assert.rejects(async () => { - const tag = [{ a: 1 }]; - await store.putObjectTagging(name, tag); - }, err => { - assert(err.message.includes('tag must be Object')); - return true; - }); - }); - - it('should throw error when the type of tag value is number', async () => { - await assert.rejects(async () => { - const tag = { a: 1 }; - await store.putObjectTagging(name, tag); - }, err => { - assert.strictEqual('the key and value of the tag must be String', err.message); - return true; - }); - }); - - it('should throw error when the type of tag value is Object', async () => { - await assert.rejects(async () => { - const tag = { a: { inner: '1' } }; - await store.putObjectTagging(name, tag); - }, err => { - assert.strictEqual('the key and value of the tag must be String', err.message); - return true; - }); - }); - - it('should throw error when the type of tag value is Array', async () => { - await assert.rejects(async () => { - const tag = { a: [ '1', '2' ] }; - await store.putObjectTagging(name, tag); - }, err => { - assert.strictEqual('the key and value of the tag must be String', err.message); - return true; - }); - }); - - it('should delete the tags of object', async () => { - let result; - const tag = { a: '1', b: '2' }; - await store.putObjectTagging(name, tag); - - result = await store.deleteObjectTagging(name); - assert.strictEqual(result.status, 
204); - - result = await store.getObjectTagging(name); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, {}); - }); - }); - - describe('options.headerEncoding', () => { - const utf8_content = '阿达的大多'; - // const latin1_content = Buffer.from(utf8_content).toString('latin1'); - let name; - before(async () => { - store.options.headerEncoding = 'latin1'; - name = `${prefix}oss-client/oss/put-new-latin1.js`; - const result = await store.put(name, __filename, { - meta: { - a: utf8_content, - }, - }); - assert.equal(result.res.status, 200); - const info = await store.head(name); - assert.equal(info.status, 200); - // assert.equal(info.meta.a, latin1_content); - assert.equal(info.meta.a, utf8_content); - }); - - after(() => { - store.options.headerEncoding = 'utf-8'; - }); - - it('copy() should return 200 when set zh-cn meta', async () => { - const originname = `${prefix}oss-client/oss/copy-new-latin1.js`; - const result = await store.copy(originname, name, { - meta: { - a: utf8_content, - }, - }); - assert.equal(result.res.status, 200); - const info = await store.head(originname); - assert.equal(info.status, 200); - // assert.equal(info.meta.a, latin1_content); - assert.equal(info.meta.a, utf8_content); - }); - - it('copy() should return 200 when set zh-cn meta with zh-cn object name', async () => { - const originname = `${prefix}oss-client/oss/copy-new-latin1-中文.js`; - const result = await store.copy(originname, name, { - meta: { - a: utf8_content, - }, - }); - assert.equal(result.res.status, 200); - const info = await store.head(originname); - assert.equal(info.status, 200); - // assert.equal(info.meta.a, latin1_content); - assert.equal(info.meta.a, utf8_content); - }); - - it('putMeta() should return 200', async () => { - const result = await store.putMeta(name, { - b: utf8_content, - }); - assert.equal(result.res.status, 200); - const info = await store.head(name); - assert.equal(info.status, 200); - // assert.equal(info.meta.b, latin1_content); 
- assert.equal(info.meta.b, utf8_content); - }); - }); -}); diff --git a/test/rtmp.test.js b/test/rtmp.test.js deleted file mode 100644 index 466c1eefb..000000000 --- a/test/rtmp.test.js +++ /dev/null @@ -1,220 +0,0 @@ -const assert = require('assert'); -const utils = require('./utils'); -const is = require('is-type-of'); -const oss = require('..'); -const config = require('./config').oss; - -describe.skip('test/rtmp.test.js', () => { - const { prefix } = utils; - let store; - let bucket; - let bucketRegion; - let cid; - let conf; - before(async () => { - store = oss(config); - bucket = `oss-client-test-bucket-rtmp-${prefix.replace(/[/.]/g, '-')}`; - bucket = bucket.substring(0, bucket.length - 1); - store.useBucket(bucket); - - const result = await store.putBucket(bucket, bucketRegion); - assert.equal(result.bucket, bucket); - assert.equal(result.res.status, 200); - - cid = 'channel-1'; - conf = { - Description: 'this is channel 1', - Status: 'enabled', - Target: { - Type: 'HLS', - FragDuration: '10', - FragCount: '5', - PlaylistName: 'playlist.m3u8', - }, - }; - }); - - describe('put/get/deleteChannel()', () => { - it('should create a new channel', async () => { - const tempCid = cid; - const tempConf = conf; - - let result = await store.putChannel(tempCid, tempConf); - assert.equal(result.res.status, 200); - assert(is.array(result.publishUrls)); - assert(result.publishUrls.length > 0); - assert(is.array(result.playUrls)); - assert(result.playUrls.length > 0); - - result = await store.getChannel(tempCid); - assert.equal(result.res.status, 200); - assert.deepEqual(result.data, conf); - - result = await store.deleteChannel(tempCid); - assert.equal(result.res.status, 204); - - await utils.throws(async () => { - await store.getChannel(tempCid); - }, err => { - assert.equal(err.status, 404); - }); - }); - }); - - describe('put/getChannelStatus()', () => { - let statusConfCid; - before(async () => { - statusConfCid = 'live channel 2'; - const statusConf = conf; - 
statusConf.Description = 'this is live channel 2'; - await store.putChannel(statusConfCid, statusConf); - }); - - after(async () => { - await store.deleteChannel(statusConfCid); - }); - - it('should disable channel', async () => { - let result = await store.getChannelStatus(statusConfCid); - assert.equal(result.res.status, 200); - assert.equal(result.data.Status, 'Idle'); - - // TODO: verify ConnectedTime/RemoteAddr/Video/Audio when not idle - - result = await store.putChannelStatus(statusConfCid, 'disabled'); - assert.equal(result.res.status, 200); - - result = await store.getChannelStatus(statusConfCid); - assert.equal(result.res.status, 200); - assert.equal(result.data.Status, 'Disabled'); - }); - }); - - describe('listChannels()', () => { - let channelNum; - let channelPrefix; - before(async () => { - channelNum = 10; - channelPrefix = 'channel-list-'; - await Promise.all(Array(channelNum).fill(1).map((_, i) => { - conf.Description = i; - return store.putChannel(channelPrefix + i, conf); - })); - }); - - after(async () => { - await Promise.all(Array(channelNum).fill(1).map((_, i) => store.deleteChannel(channelPrefix + i))); - }); - - it('list channels using prefix/marker/max-keys', async () => { - const query = { - prefix: 'channel-list-', - marker: 'channel-list-4', - 'max-keys': 3, - }; - - const result = await store.listChannels(query); - - assert.equal(result.res.status, 200); - assert.equal(result.nextMarker, 'channel-list-7'); - assert.equal(result.isTruncated, true); - - const { channels } = result; - assert.equal(channels.length, 3); - assert.equal(channels[0].Name, channelPrefix + 5); - assert.equal(channels[1].Name, channelPrefix + 6); - assert.equal(channels[2].Name, channelPrefix + 7); - }); - }); - - describe('getChannelHistory()', () => { - let historyCid; - before(async () => { - historyCid = 'channel-3'; - const historyconf = conf; - historyconf.Description = 'this is live channel 3'; - await store.putChannel(historyCid, historyconf); - }); - - 
after(async () => { - await store.deleteChannel(historyCid); - }); - - it('should get channel history', async () => { - const result = await store.getChannelHistory(historyCid); - - assert.equal(result.res.status, 200); - assert(is.array(result.records)); - assert.equal(result.records.length, 0); - - // TODO: verify LiveRecord when history exists - // verify wish OBS or ffmpeg - }); - }); - - describe('createVod()', () => { - let createVodCid; - before(async () => { - createVodCid = 'channel-4'; - const createVodConf = conf; - createVodConf.Description = 'this is live channel 4'; - const result = await store.putChannel(createVodCid, createVodConf); - assert.equal(result.res.status, 200); - const url = store.getRtmpUrl(createVodCid, { - params: { - playlistName: 'vod.m3u8', - }, - expires: 3600, - }); - console.log(url); - }); - - after(async () => { - await store.deleteChannel(createVodCid); - }); - - // this case need have data in server - it.skip('should create vod playlist', async () => { - const name = 'vod.m3u8'; - const now = Date.now(); - - try { - const result = await store.createVod(cid, name, { - startTime: Math.floor((now - 100) / 1000), - endTime: Math.floor(now / 1000), - }); - - assert.equal(result.res.status, 200); - } catch (err) { - console.error(err); - } - }); - }); - - describe('getRtmpUrl()', () => { - let getRtmpUrlCid; - before(async () => { - getRtmpUrlCid = 'channel-5'; - const getRtmpUrlConf = conf; - getRtmpUrlConf.Description = 'this is live channel 5'; - const result = await store.putChannel(getRtmpUrlCid, getRtmpUrlConf); - assert.equal(result.res.status, 200); - }); - - after(async () => { - await store.deleteChannel(getRtmpUrlCid); - }); - - it('should get rtmp url', () => { - const name = 'vod.m3u8'; - const url = store.getRtmpUrl(getRtmpUrlCid, { - params: { - playlistName: name, - }, - expires: 3600, - }); - console.log(url); - // verify the url is ok used by OBS or ffmpeg - }); - }); -}); diff --git a/test/sts.test.js 
b/test/sts.test.js deleted file mode 100644 index fe5492e8f..000000000 --- a/test/sts.test.js +++ /dev/null @@ -1,229 +0,0 @@ -const assert = require('assert'); -const utils = require('./utils'); -const sts = require('..').STS; -const OSS = require('..'); -const config = require('./config').oss; -const stsConfig = require('./config').sts; -const mm = require('mm'); - -describe.skip('test/sts.test.js', () => { - const { prefix } = utils; - describe('assumeRole()', () => { - it('should assume role', async () => { - const stsClient = sts(stsConfig); - const result = await stsClient.assumeRole(stsConfig.roleArn); - assert.strictEqual(result.res.status, 200); - }); - - it('should assume role with policy', async () => { - const stsClient = sts(stsConfig); - const policy = { - Statement: [ - { - Action: [ 'oss:*' ], - Effect: 'Allow', - Resource: [ 'acs:oss:*:*:*' ], - }, - ], - Version: '1', - }; - const result = await stsClient.assumeRole(stsConfig.roleArn, policy); - assert.strictEqual(result.res.status, 200); - }); - - it('should assume role with policy string', async () => { - const stsClient = sts(stsConfig); - const policy = ` - { - "Statement": [ - { - "Action": [ - "oss:*" - ], - "Effect": "Allow", - "Resource": ["acs:oss:*:*:*"] - } - ], - "Version": "1" - }`; - const result = await stsClient.assumeRole(stsConfig.roleArn, policy); - assert.strictEqual(result.res.status, 200); - }); - - it('should handle error in assume role', async () => { - const stsClient = sts(stsConfig); - const policy = ` - { - "Statements": [ - { - "Action": [ - "oss:*" - ], - "Effect": "Allow", - "Resource": ["acs:oss:*:*:*"] - } - ], - "Version": "1" - }`; - - try { - await stsClient.assumeRole(stsConfig.roleArn, policy); - assert(false); - } catch (err) { - err.message.should.match(/InvalidParameter.PolicyGrammar/); - } - }); - - it('should list objects using STS', async () => { - const stsClient = sts(stsConfig); - let result = await stsClient.assumeRole(stsConfig.roleArn); - 
assert.strictEqual(result.res.status, 200); - - const ossClient = new OSS({ - region: config.region, - accessKeyId: result.credentials.AccessKeyId, - accessKeySecret: result.credentials.AccessKeySecret, - stsToken: result.credentials.SecurityToken, - bucket: stsConfig.bucket, - }); - - const name = `${prefix}oss-client/oss/sts-put1.js`; - result = await ossClient.put(name, __filename); - assert.strictEqual(result.res.status, 200); - - result = await ossClient.list({ - 'max-keys': 10, - }); - - assert.strictEqual(result.res.status, 200); - }); - - it('should delete multi objects using STS', async () => { - const stsClient = sts(stsConfig); - - let policy = { - Statement: [ - { - Action: [ 'oss:PutObject' ], - Effect: 'Allow', - Resource: [ 'acs:oss:*:*:*' ], - }, - ], - Version: '1', - }; - - let result = await stsClient.assumeRole(stsConfig.roleArn, policy); - assert.strictEqual(result.res.status, 200); - - let ossClient = new OSS({ - region: config.region, - accessKeyId: result.credentials.AccessKeyId, - accessKeySecret: result.credentials.AccessKeySecret, - stsToken: result.credentials.SecurityToken, - bucket: stsConfig.bucket, - }); - - const name1 = `${prefix}oss-client/oss/sts-put1.js`; - const name2 = `${prefix}oss-client/oss/sts-put2.js`; - result = await ossClient.put(name1, __filename); - assert.strictEqual(result.res.status, 200); - - result = await ossClient.put(name2, __filename); - assert.strictEqual(result.res.status, 200); - - try { - await ossClient.deleteMulti([ name1, name2 ]); - assert(false); - } catch (err) { - err.message.should.match(/Access denied by authorizer's policy/); - } - - policy = { - Statement: [ - { - Action: [ 'oss:DeleteObject' ], - Effect: 'Allow', - Resource: [ 'acs:oss:*:*:*' ], - }, - ], - Version: '1', - }; - - result = await stsClient.assumeRole(stsConfig.roleArn, policy); - assert.strictEqual(result.res.status, 200); - - ossClient = new OSS({ - region: config.region, - accessKeyId: result.credentials.AccessKeyId, - 
accessKeySecret: result.credentials.AccessKeySecret, - stsToken: result.credentials.SecurityToken, - bucket: stsConfig.bucket, - }); - - result = await ossClient.deleteMulti([ name1, name2 ]); - assert.strictEqual(result.res.status, 200); - }); - }); - - describe('refreshSTSToken()', () => { - let stsClient; - let store; - before(async () => { - stsClient = sts(stsConfig); - const { credentials } = await stsClient.assumeRole(stsConfig.roleArn); - const testRefreshSTSTokenConf = { - region: config.region, - accessKeyId: credentials.AccessKeyId, - accessKeySecret: credentials.AccessKeySecret, - stsToken: credentials.SecurityToken, - bucket: stsConfig.bucket, - refreshSTSTokenInterval: 1000, - }; - store = new OSS(testRefreshSTSTokenConf); - }); - - it('should refresh sts token when token is expired', async () => { - try { - store.options.refreshSTSToken = async () => { - mm.restore(); - const { credentials } = await stsClient.assumeRole(stsConfig.roleArn); - return credentials; - }; - const ak = store.options.accessKeyId; - await store.listBuckets(); - assert.strictEqual(ak, store.options.accessKeyId); - await utils.sleep(2000); - await store.listBuckets(); - assert.notStrictEqual(ak, store.options.accessKeyId); - } catch (error) { - assert(false, error); - } - }); - - it('asyncSignatureUrl will should use refreshSTSToken', async () => { - const { credentials } = await stsClient.assumeRole(stsConfig.roleArn); - let flag = false; - - store = new OSS({ - region: config.region, - accessKeyId: credentials.AccessKeyId, - accessKeySecret: credentials.AccessKeySecret, - stsToken: credentials.SecurityToken, - refreshSTSToken: () => { - flag = true; - return { - accessKeyId: 'b', - accessKeySecret: 'b', - stsToken: 'b', - }; - }, - bucket: stsConfig.bucket, - refreshSTSTokenInterval: 1000, - }); - await utils.sleep(2000); - await store.asyncSignatureUrl('test.txt'); - - assert(flag); - }); - }); -}); diff --git a/test/util/isIP.test.ts b/test/util/isIP.test.ts new file mode 
100644 index 000000000..9c8c1a963 --- /dev/null +++ b/test/util/isIP.test.ts @@ -0,0 +1,88 @@ +import { strict as assert } from 'node:assert'; +import { isIP } from '../../src/util/index.js'; + +describe('test/util/isIP.test.ts', () => { + it('ipv4 test', () => { + // first length is 3 + assert.equal(isIP('200.255.255.255'), true); + assert.equal(isIP('223.255.255.255'), true); + assert.equal(isIP('224.255.255.255'), true); + assert.equal(isIP('192.0.0.1'), true); + assert.equal(isIP('127.0.0.1'), true); + assert.equal(isIP('100.0.0.1'), true); + assert.equal(isIP('90.0.0.1'), true); + assert.equal(isIP('9.0.0.1'), true); + assert.equal(isIP('090.0.0.1'), false); + assert.equal(isIP('009.0.0.1'), false); + assert.equal(isIP('200.1.255.255'), true); + assert.equal(isIP('200.001.255.255'), false); + + // first length is 1 or 2 + assert.equal(isIP('09.255.255.255'), false); + assert.equal(isIP('9.255.255.255'), true); + assert.equal(isIP('90.255.255.255'), true); + assert.equal(isIP('00.255.255.255'), false); + assert.equal(isIP('-.0.0.1'), false); + assert.equal(isIP('0.0.0.1'), true); + assert.equal(isIP('1.0.0.1'), true); + + // test last 3 byte + assert.equal(isIP('200.0.255.255'), true); + assert.equal(isIP('200.01.255.255'), false); + assert.equal(isIP('200.1.255.255'), true); + assert.equal(isIP('200.10.255.255'), true); + assert.equal(isIP('200.256.255.255'), false); + assert.equal(isIP('200.1.255.255'), true); + assert.equal(isIP('200.001.255.255'), false); + + assert.equal(isIP('200.255.0.255'), true); + assert.equal(isIP('200.255.01.255'), false); + assert.equal(isIP('200.255.1.255'), true); + assert.equal(isIP('200.255.10.255'), true); + assert.equal(isIP('200.255.256.255'), false); + assert.equal(isIP('200.255.001.255'), false); + assert.equal(isIP('200.255.1.255'), true); + + assert.equal(isIP('200.255.255.0'), true); + assert.equal(isIP('200.255.255.01'), false); + assert.equal(isIP('200.255.255.1'), true); + assert.equal(isIP('200.255.255.10'), true); 
+ assert.equal(isIP('200.255.255.256'), false); + assert.equal(isIP('200.255.255.001'), false); + assert.equal(isIP('200.255.255.1'), true); + + // excetion + assert.equal(isIP('200'), false); + assert.equal(isIP('200.1'), false); + assert.equal(isIP('200.1.1'), false); + assert.equal(isIP('200.1.1.1.1'), false); + }); + + it('ipv6 test', () => { + assert.equal(isIP('1:2:3:4:5:6:7::'), true); + assert.equal(isIP('1:2:3:4:5:6:7:8'), true); + assert.equal(isIP('1:2:3:4:5:6::'), true); + assert.equal(isIP('1:2:3:4:5:6::8'), true); + assert.equal(isIP('1:2:3:4:5::'), true); + assert.equal(isIP('1:2:3:4:5::8'), true); + assert.equal(isIP('1:2:3:4::'), true); + assert.equal(isIP('1:2:3:4::8'), true); + assert.equal(isIP('1:2:3::'), true); + assert.equal(isIP('1:2:3::8'), true); + assert.equal(isIP('1:2::'), true); + assert.equal(isIP('1:2::8'), true); + assert.equal(isIP('1::'), true); + assert.equal(isIP('1::8'), true); + assert.equal(isIP('::'), true); + assert.equal(isIP('::8'), true); + assert.equal(isIP('::7:8'), true); + assert.equal(isIP('::6:7:8'), true); + assert.equal(isIP('::5:6:7:8'), true); + assert.equal(isIP('::4:5:6:7:8'), true); + assert.equal(isIP('::3:4:5:6:7:8'), true); + assert.equal(isIP('::2:3:4:5:6:7:8'), true); + assert.equal(isIP('A:0f:0F:FFFF:5:6:7:8'), true); + assert.equal(isIP('A:0f:0F:FFFF1:5:6:7:8'), false); + assert.equal(isIP('G:0f:0F:FFFF:5:6:7:8'), false); + }); +}); diff --git a/test/utils.js b/test/utils.js deleted file mode 100644 index ffd930f24..000000000 --- a/test/utils.js +++ /dev/null @@ -1,174 +0,0 @@ -const assert = require('assert'); -const fs = require('fs'); -const urlutil = require('url'); -const { isObject } = require('../lib/common/utils/isObject'); - -exports.throws = async function(block, checkError) { - try { - await block(); - } catch (err) { - if (typeof checkError === 'function') { - return checkError(err); - } - // throws(block, errorName) - if (typeof checkError === 'string') { - return assert.equal(err.name, 
checkError); - } - // throw(block, RegExp) - if (!checkError.test(err.toString())) { - throw new Error(`expected ${err.toString()} to match ${checkError.toString()}`); - } - return false; - } - throw new Error(`${block.toString()} should throws error`); -}; - -exports.sleep = function(ms) { - return new Promise(resolve => { - setTimeout(() => { - resolve(); - }, ms); - }); -}; - -exports.cleanBucket = async function(store, bucket, multiversion) { - store.useBucket(bucket); - let result; - const options = { versionId: null }; - - if (!multiversion) { - try { - await store.getBucketVersions({ - 'max-keys': 1000, - }); - multiversion = true; - } catch (error) { - multiversion = false; - } - } - - async function handleDelete(deleteKey) { - if (multiversion) { - result = await store.getBucketVersions({ - 'max-keys': 1000, - }); - } else { - result = await store.list({ - 'max-keys': 1000, - }); - } - result[deleteKey] = result[deleteKey] || []; - - await Promise.all(result[deleteKey] - .map(_ => store.delete(_.name, multiversion ? 
- Object.assign({}, options, { versionId: _.versionId }) : - options))); - } - await handleDelete('objects'); - if (multiversion) { - await handleDelete('deleteMarker'); - } - - result = await store.listUploads({ - 'max-uploads': 1000, - }); - const uploads = result.uploads || []; - await Promise.all(uploads.map(_ => store.abortMultipartUpload(_.name, _.uploadId))); - - const channels = (await store.listChannels()).channels.map(_ => _.Name); - await Promise.all(channels.map(_ => store.deleteChannel(_))); - await store.deleteBucket(bucket); -}; - -exports.prefix = `${process.platform}-${process.version}-${new Date().getTime()}/`; - -exports.createTempFile = async function createTempFile(name, size) { - const tmpdir = '/tmp/.oss/'; - if (!fs.existsSync(tmpdir)) { - fs.mkdirSync(tmpdir); - } - - await new Promise((resolve, reject) => { - const rs = fs.createReadStream('/dev/urandom', { - start: 0, - end: size - 1, - }); - const ws = fs.createWriteStream(tmpdir + name); - rs.pipe(ws); - ws.on('finish', (err, res) => { - if (err) { - reject(err); - } else { - resolve(res); - } - }); - }); - - return tmpdir + name; -}; - -/* - * cb = { - * url: 'd.rockuw.com:4567', - * query: {user: 'me'}, - * contentType: 'application/json', - * body: '{"hello": "world"}' - * }; - */ -exports.encodeCallback = function(cb) { - const url = urlutil.parse(cb.url); - url.query = cb.query; - - const json = { - callbackUrl: url.format(), - callbackBody: cb.body, - callbackBodyType: cb.contentType || 'application/x-www-form-urlencoded', - }; - - return Buffer.from(JSON.stringify(json)).toString('base64'); -}; - -// 如果配置属性值是数组 则判断配置的数组是不是数据的子数组。 -// 如果配置属性值是对象 则判断数据包含的属性值包不包含配置项属性值。 -// 如果配置属性值是简单数据类型 则判断数据的有配置的属性且值相等 -exports.includesConf = function includesConf(data, conf) { - if (conf === null || typeof conf !== 'object') { - return data === conf; - } - - let valid = true; - if (Array.isArray(conf)) { - if (!Array.isArray(data)) return false; - for (let i = 0; i < conf.length; i++) { - let 
itemValid = false; - for (let j = 0; j < data.length; j++) { - if (includesConf(data[j], conf[i])) { - itemValid = true; - break; - } - } - if (!itemValid) return false; - } - return valid; - } - - const keys = Object.keys(conf); - for (let i = 0; i < keys.length; i++) { - const key = keys[i]; - if (!isObject(conf[key]) && !Array.isArray(conf[key])) { - if (conf[key] !== data[key]) { - valid = false; - break; - } - } else if (isObject(conf[key]) || Array.isArray(conf[key])) { - if (!includesConf(data[key], conf[key])) { - valid = false; - break; - } - } else if (conf[key] !== data[key]) { - valid = false; - break; - } - } - return valid; -}; diff --git a/test/utils.test.js b/test/utils.test.js deleted file mode 100644 index 3dc134427..000000000 --- a/test/utils.test.js +++ /dev/null @@ -1,226 +0,0 @@ -const { isIP: _isIP } = require('../lib/common/utils/isIP'); -const { includesConf } = require('./utils'); -const assert = require('assert'); - -describe('test/test.js', () => { - it('ipv4 test', () => { - // first length is 3 - assert.equal(_isIP('200.255.255.255'), true); - assert.equal(_isIP('223.255.255.255'), true); - assert.equal(_isIP('224.255.255.255'), true); - assert.equal(_isIP('192.0.0.1'), true); - assert.equal(_isIP('127.0.0.1'), true); - assert.equal(_isIP('100.0.0.1'), true); - assert.equal(_isIP('090.0.0.1'), true); - assert.equal(_isIP('009.0.0.1'), true); - assert.equal(_isIP('200.001.255.255'), true); - - // first length is 1 or 2 - assert.equal(_isIP('09.255.255.255'), true); - assert.equal(_isIP('90.255.255.255'), true); - assert.equal(_isIP('00.255.255.255'), true); - assert.equal(_isIP('-.0.0.1'), false); - assert.equal(_isIP('0.0.0.1'), true); - assert.equal(_isIP('1.0.0.1'), true); - - // test last 3 byte - assert.equal(_isIP('200.0.255.255'), true); - assert.equal(_isIP('200.01.255.255'), true); - assert.equal(_isIP('200.10.255.255'), true); - assert.equal(_isIP('200.256.255.255'), false); - assert.equal(_isIP('200.001.255.255'), true); - - 
assert.equal(_isIP('200.255.0.255'), true); - assert.equal(_isIP('200.255.01.255'), true); - assert.equal(_isIP('200.255.10.255'), true); - assert.equal(_isIP('200.255.256.255'), false); - assert.equal(_isIP('200.255.001.255'), true); - - assert.equal(_isIP('200.255.255.0'), true); - assert.equal(_isIP('200.255.255.01'), true); - assert.equal(_isIP('200.255.255.10'), true); - assert.equal(_isIP('200.255.255.256'), false); - assert.equal(_isIP('200.255.255.001'), true); - - // excetion - assert.equal(_isIP('200.255.255.001'), true); - assert.equal(_isIP('200'), false); - assert.equal(_isIP('200.1'), false); - assert.equal(_isIP('200.1.1'), false); - assert.equal(_isIP('200.1.1.1.1'), false); - }); - it('ipv6 test', () => { - assert.equal(_isIP('1:2:3:4:5:6:7::'), true); - assert.equal(_isIP('1:2:3:4:5:6:7:8'), true); - assert.equal(_isIP('1:2:3:4:5:6::'), true); - assert.equal(_isIP('1:2:3:4:5:6::8'), true); - assert.equal(_isIP('1:2:3:4:5::'), true); - assert.equal(_isIP('1:2:3:4:5::8'), true); - assert.equal(_isIP('1:2:3:4::'), true); - assert.equal(_isIP('1:2:3:4::8'), true); - assert.equal(_isIP('1:2:3::'), true); - assert.equal(_isIP('1:2:3::8'), true); - assert.equal(_isIP('1:2::'), true); - assert.equal(_isIP('1:2::8'), true); - assert.equal(_isIP('1::'), true); - assert.equal(_isIP('1::8'), true); - assert.equal(_isIP('::'), true); - assert.equal(_isIP('::8'), true); - assert.equal(_isIP('::7:8'), true); - assert.equal(_isIP('::6:7:8'), true); - assert.equal(_isIP('::5:6:7:8'), true); - assert.equal(_isIP('::4:5:6:7:8'), true); - assert.equal(_isIP('::3:4:5:6:7:8'), true); - assert.equal(_isIP('::2:3:4:5:6:7:8'), true); - assert.equal(_isIP('A:0f:0F:FFFF:5:6:7:8'), true); - assert.equal(_isIP('A:0f:0F:FFFF1:5:6:7:8'), false); - assert.equal(_isIP('G:0f:0F:FFFF:5:6:7:8'), false); - }); -}); - -describe('test/includesConf.js', () => { - it('shoud return true when conf-item is primitive value', () => { - const data = { - testNum: 1, - testStr: '2', - 
testUndefined: undefined, - testNull: null, - testExtral: 'extral', - }; - const conf = { - testNum: 1, - testStr: '2', - testUndefined: undefined, - testNull: null, - }; - assert(includesConf(data, conf)); - }); - it('shoud return false when conf-item is primitive value and conf not in data', () => { - const data = { - testNum: 1, - testStr: '2', - testUndefined: undefined, - testNull: null, - testExtral: 'extral', - }; - const conf = { - testNonExist: 1, - }; - const conf1 = { - testExtral: 'test', - }; - assert(!includesConf(data, conf)); - assert(!includesConf(data, conf1)); - }); - it('shoud return true when conf-item is simple Array', () => { - const data = { - testArray1: [ 'extral', '1', 0, undefined ], - testExtral: 'extral', - }; - const conf = { - testArray1: [ '1', 0, undefined ], - }; - assert(includesConf(data, conf)); - }); - it('shoud return false when conf-item is simple Array and conf not in data', () => { - const data = { - testArray1: [ 'extral', '1', 0, undefined ], - testExtral: 'extral', - }; - const conf = { - testArray1: [ '1', 0, undefined, 'noexist' ], - }; - assert(!includesConf(data, conf)); - }); - it('shoud return true when conf-item is simple Object', () => { - const data = { - testObject: { test: 1, test1: 2 }, - testExtral: 'extral', - }; - const conf = { - testObject: { test: 1 }, - }; - assert(includesConf(data, conf)); - }); - it('shoud return false when conf-item is simple Object and conf not in data', () => { - const data = { - testObject: { test: 1, test1: 2 }, - testExtral: 'extral', - }; - const conf = { - testObject: { test: 1, noExist: 'test' }, - }; - assert(!includesConf(data, conf)); - }); - it('shoud return true when conf-item is complex Array', () => { - const data = { - testArray: [{ test: 1, test1: 2 }, { test: 2 }], - testExtral: 'extral', - }; - const conf = { - testArray: [{ test: 2 }], - }; - assert(includesConf(data, conf)); - }); - it('shoud return false when conf-item is complex Array and conf not in data', 
() => { - const data = { - testArray: [{ test: 1, test1: 2 }, { test: 2 }], - testExtral: 'extral', - }; - const conf = { - testArray: [{ test: 0 }], - }; - assert(!includesConf(data, conf)); - }); - it('shoud return true when conf-item is complex Object', () => { - const data = { - testObject: { - test01: { - test11: { - a: 1, - }, - test12: 1123, - }, - test02: [{ test11: 1 }, '123', 0, undefined, '456' ], - }, - testExtral: 'extral', - }; - const conf = { - testObject: { - test01: { - test11: { - a: 1, - }, - }, - test02: [{ test11: 1 }, '123', 0, undefined ], - }, - }; - assert(includesConf(data, conf)); - }); - it('shoud return false when conf-item is complex Object and conf not in data', () => { - const data = { - testObject: { - test01: { - test11: { - a: 1, - }, - test12: 1123, - }, - test02: [{ test11: 1 }, '123', 0, undefined, '456' ], - }, - testExtral: 'extral', - }; - const conf = { - testObject: { - test01: { - test11: { - a: 1, - b: 'test cpx', - }, - }, - test02: [{ test11: 1 }, '123', 0, undefined ], - }, - }; - assert(!includesConf(data, conf)); - }); -}); diff --git a/test/utils/checkConfigValid.test.js b/test/utils/checkConfigValid.test.js deleted file mode 100644 index 6db743bd8..000000000 --- a/test/utils/checkConfigValid.test.js +++ /dev/null @@ -1,55 +0,0 @@ -const assert = require('assert'); -const { checkConfigValid } = require('../../lib/common/utils/checkConfigValid'); - -describe('test/utils/checkConfigValid.test.js', () => { - describe('endpoint', () => { - it('should success when endpoint is valid', () => { - try { - const endpoint = 'testa_-.com'; - checkConfigValid(endpoint, 'endpoint'); - assert(true); - } catch (error) { - assert(false); - } - }); - it('should throw when endpoint includes invalid character', () => { - const errorStr = '中~!@#$%^&*()+={}[]|\\";\',<>?'; - errorStr.split('').map(_ => `test-a_b.${_}.com`).forEach( - str => { - try { - checkConfigValid(str, 'endpoint'); - assert(false); - } catch (error) { - 
assert(error.message.includes('endpoint')); - } - } - ); - }); - }); - - describe('region', () => { - it('should success when region is valid', () => { - try { - const region = 'oss-cn-hangzhou'; - checkConfigValid(region, 'region'); - assert(true); - } catch (error) { - assert(false); - } - }); - it('should throw when region includes invalid character', () => { - const errorStr = '中~!@#$%^&*()+={}[]|\\";\',<>?'; - errorStr.split('').map(_ => `oss-${_}hangzhou`).forEach( - str => { - try { - checkConfigValid(str, 'region'); - assert(false); - } catch (error) { - assert(error.message.includes('region')); - } - } - ); - }); - }); - -}); diff --git a/test/utils/deepCopy.test.js b/test/utils/deepCopy.test.js deleted file mode 100644 index b73863c17..000000000 --- a/test/utils/deepCopy.test.js +++ /dev/null @@ -1,47 +0,0 @@ -const assert = require('assert'); -const { deepCopy, deepCopyWith } = require('../../lib/common/utils/deepCopy'); - -describe('utils/deepCopy()', () => { - it('should copy big Buffers correctly', () => { - // 2^30 - 1 ~ 1GB is max size on 32-bit computer - // See https://nodejs.org/api/buffer.html#buffer_buffer_constants_max_length - const numberBytes = Math.pow(2, 30) - 1; - const obj = { - buffer: Buffer.alloc(numberBytes), - }; - const copy = deepCopy(obj); - assert.strictEqual(Object.keys(obj).length, Object.keys(copy).length); - assert(obj.buffer.equals(copy.buffer)); - }); - - it('should skip some properties when use deepCopyWith', () => { - const numberBytes = Math.pow(2, 30) - 1; - const obj = { - a: 1, - b: { - c: 2, - }, - buffer: Buffer.alloc(numberBytes), - }; - const copy1 = deepCopyWith(obj, (_, key) => { - if (key === 'buffer') return null; - }); - assert.deepStrictEqual(copy1, { - a: 1, - b: { - c: 2, - }, - buffer: null, - }); - - const copy2 = deepCopyWith(obj); - assert.deepStrictEqual(obj.a, copy2.a); - assert.deepStrictEqual(obj.b, copy2.b); - assert(obj.buffer.equals(copy2.buffer)); - - const copy3 = deepCopyWith(obj, () => 
{}); - assert.deepStrictEqual(obj.a, copy3.a); - assert.deepStrictEqual(obj.b, copy3.b); - assert(obj.buffer.equals(copy3.buffer)); - }); -}); diff --git a/test/utils/omit.test.js b/test/utils/omit.test.js deleted file mode 100644 index 9ea7d1694..000000000 --- a/test/utils/omit.test.js +++ /dev/null @@ -1,28 +0,0 @@ -const assert = require('assert'); -const { omit } = require('../../lib/common/utils/omit'); - -describe('omit test case', () => { - const originObject = { - name: 'man', - age: '38', - sex: 'male', - children: { - name: 'child', - age: '18', - }, - }; - - it('should return new object', () => { - const newObject = omit(originObject, []); - assert(newObject !== originObject); - }); - it('should remove properties', () => { - const newObject = omit(originObject, [ 'age' ]); - assert.equal(newObject.age, undefined); - }); - it('should not remove children node name', () => { - const newObject = omit(originObject, [ 'name' ]); - assert.equal(newObject.name, undefined); - assert.equal(newObject.children.name, 'child'); - }); -}); diff --git a/test/utils/retry.test.js b/test/utils/retry.test.js deleted file mode 100644 index 7e42102e8..000000000 --- a/test/utils/retry.test.js +++ /dev/null @@ -1,126 +0,0 @@ -const assert = require('assert'); -const { md5 } = require('utility'); -const mm = require('mm'); -const fs = require('fs'); -const OSS = require('../..'); -const config = require('../config').oss; -const utils = require('../utils'); - -describe('test/retry.test.js', () => { - let store; - const RETRY_MAX = 3; - let testRetryCount = 0; - const bucket = config.bucket; - before(async () => { - store = new OSS({ - ...config, - retryMax: RETRY_MAX, - }); - store.useBucket(bucket); - }); - beforeEach(() => { - testRetryCount = 0; - const originRequest = store.urllib.request; - mm(store.urllib, 'request', async (url, params) => { - if (testRetryCount < RETRY_MAX) { - testRetryCount++; - const e = new Error('net error'); - e.status = -1; - e.headers = {}; - throw 
e; - } else { - return await originRequest(url, params); - } - }); - }); - afterEach(() => { - mm.restore(); - }); - - it.skip('set retryMax to test request auto retry when networkError or timeout', async () => { - const res = await store.listBuckets(); - assert.strictEqual(res.res.status, 200); - assert.strictEqual(testRetryCount, RETRY_MAX); - }); - - it('should throw when retry count bigger than options retryMax', async () => { - mm.error(store.urllib, 'request', { - status: -1, // timeout - headers: {}, - }); - try { - await store.listBuckets(); - assert(false, 'should throw error'); - } catch (error) { - assert(error.status === -1); - } - }); - - it('should not retry when err.status is not -1 or -2', async () => { - mm.error(store.urllib, 'request', { - status: -3, - headers: {}, - }); - try { - const name = `oss-client-test-retry-file-${Date.now()}`; - const fileName = await utils.createTempFile(name, 1 * 1024); - await store.put(name, fileName); - assert(false, 'should throw error'); - } catch (error) { - assert.strictEqual(error.status, -3); - } - }); - - it('should succeed when put with filename', async () => { - const name = `oss-client-test-retry-file-${Date.now()}`; - const fileName = await utils.createTempFile(name, 1 * 1024); - const res = await store.put(name, fileName); - assert.strictEqual(res.res.status, 200); - assert.strictEqual(testRetryCount, RETRY_MAX); - const onlineFile = await store.get(name); - assert.strictEqual(md5(fs.readFileSync(fileName)), md5(onlineFile.content)); - }); - - it('should succeed when multipartUpload with filename', async () => { - mm.restore(); - const originRequest = store.urllib.request; - const UPLOAD_PART_SEQ = 1; - let CurrentRequsetTimer = 0; - mm(store.urllib, 'request', async (url, params) => { - // skip mock when initMultipartUpload - if (CurrentRequsetTimer < UPLOAD_PART_SEQ) { - CurrentRequsetTimer++; - return originRequest(url, params); - } - // mock net error when upload part - if (testRetryCount < 
RETRY_MAX) { - testRetryCount++; - const e = new Error('net error'); - e.status = -1; - e.headers = {}; - throw e; - } else { - return originRequest(url, params); - } - }); - const name = `oss-client-test-retry-file-${Date.now()}`; - const fileName = await utils.createTempFile(name, 1.5 * 1024 * 1024); - const res = await store.multipartUpload(name, fileName); - assert.strictEqual(res.res.status, 200); - assert.strictEqual(testRetryCount, RETRY_MAX); - const onlineFile = await store.get(name); - assert.strictEqual(onlineFile.content.length, 1.5 * 1024 * 1024); - assert.strictEqual(md5(fs.readFileSync(fileName)), md5(onlineFile.content)); - }); - - it('should fail when put with stream', async () => { - const name = `oss-client-test-retry-file-${Date.now()}`; - const fileName = await utils.createTempFile(name, 1 * 1024); - try { - await store.put(name, fs.createReadStream(fileName)); - assert(false, 'should not reach here'); - } catch (e) { - assert.strictEqual(e.status, -1); - } - }); -}); diff --git a/tsconfig.json b/tsconfig.json index e99e989dc..ff41b7342 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,6 +1,10 @@ { "extends": "@eggjs/tsconfig", "compilerOptions": { - "target": "ESNext" + "strict": true, + "noImplicitAny": true, + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext" } }