Add Azure Blob Storage integration (#3)
* azure-features: added azure blob support for meet
* azure-feature: removed variables of .env files
* azure-features: added azure blob support for meet
* azure-feature: removed variables of .env files
* azure-features: fix to pass the boundary ranges test
* ci update Azure storage configuration in egress.yaml using yq
* ci: update yq command for modifying egress.yaml to use correct syntax
* ci: add logging for current storage provider in backend integration tests
* ci: update conditional syntax for Azure storage provider in backend integration tests
* feature-azure: modified package-lock
* ci: remove logging of current storage provider in backend integration tests
* ci: add pre-startup script execution for Azure setup in backend integration tests
* fix: streamline pre-startup command execution for Azure storage provider
* fix: update pre-startup command script for Azure storage provider
* fix: improve pre-startup command script for Azure storage provider
* fix: remove commented instruction for modifying egress.yaml in Azure setup
* fix: streamline pre-startup command execution in backend integration tests
* fix: correct command execution syntax in backend integration tests
* fix: add container name for Azure storage provider configuration
* fix: add container name and environment variables for Azure storage configuration
* ci: enhance recordings API tests to support Azure storage provider configuration
* ci: add support for Azure storage provider in webhook, security, global preferences, participants, meetings, and users API tests
* ci: add Azure container names for various API tests in backend integration workflow
* fix: update Azure storage container names for various API tests
* backend: fix - ensure all recordings are deleted after room security tests
* ci: remove MEET_WEBHOOK_ENABLED environment variable from OpenVidu Meet setup in all jobs
* backend: refactor storage services exports
* backend: update Azure Blob Storage references and error messages for consistency
* ci: add matrix strategy for Rooms API Tests to support multiple storage providers
* backend: rename ABS services for consistency
* backend: ensure maxResults is a number in listObjectsPaginated method
* ci: update storage provider from azure to abs in integration tests

---------

Co-authored-by: Piwccle <sergiosergi11@hotmail.com>
Co-authored-by: Carlos Santos <4a.santos@gmail.com>
Parent: b059acb159
Commit: 98764597e2
.github/workflows/backend-integration-test.yaml (vendored): 248 lines changed
@@ -37,6 +37,10 @@ jobs:
  test-rooms:
    name: Rooms API Tests
    runs-on: ov-actions-runner
    strategy:
      fail-fast: false
      matrix:
        storage-provider: [s3, abs]
    steps:
      - name: Setup Node.js
        uses: actions/setup-node@v4
@@ -46,13 +50,40 @@ jobs:
        run: curl -sSL https://get.livekit.io/cli | bash
      - name: Setup OpenVidu Local Deployment
        uses: OpenVidu/actions/start-openvidu-local-deployment@main
        with:
          ref-openvidu-local-deployment: development
          pre_startup_commands: |
            cat <<'BASH' > pre_startup_commands.sh
            #!/bin/bash
            if [[ "${{ matrix['storage-provider'] }}" == "abs" ]]; then
              echo "Using Azure storage provider"
              yq e -i '
                del(.storage.s3) |
                .storage.azure = {
                  "account_name": "${{ vars.MEET_AZURE_ACCOUNT_NAME }}",
                  "account_key": "${{ secrets.MEET_AZURE_ACCOUNT_KEY }}",
                  "container_name": "openvidu-appdata-rooms"
                }
              ' egress.yaml
            fi
            BASH
            chmod +x pre_startup_commands.sh && ./pre_startup_commands.sh
      - name: Setup OpenVidu Meet
        uses: OpenVidu/actions/start-openvidu-meet@main
        env:
          MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
          MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
          MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
          MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-rooms"
      - name: Run tests
        run: |
          cd backend
          npm run test:integration-rooms
        env:
          MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
          MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
          MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
          MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-rooms"
          JEST_JUNIT_OUTPUT_DIR: './reports/'
      - name: Publish Test Report
        uses: mikepenz/action-junit-report@v4
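For reference, the yq filter above strips the storage.s3 block from egress.yaml and writes an Azure block in its place, so for the "abs" matrix entry the resulting storage section should look roughly like this (a sketch; the account values come from the MEET_AZURE_ACCOUNT_NAME variable and the MEET_AZURE_ACCOUNT_KEY secret):

storage:
  azure:
    account_name: "<MEET_AZURE_ACCOUNT_NAME>"
    account_key: "<MEET_AZURE_ACCOUNT_KEY>"
    container_name: "openvidu-appdata-rooms"

The remaining jobs below repeat the same pre-startup script and environment block, changing only the container name and the npm test script per suite.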
@ -70,6 +101,10 @@ jobs:
|
||||
needs: start-aws-runner
|
||||
if: ${{ always() && (needs.start-aws-runner.result == 'success' || needs.start-aws-runner.result == 'skipped') }}
|
||||
runs-on: ${{ needs.start-aws-runner.outputs.label || 'ov-actions-runner' }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
storage-provider: [s3, abs]
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- name: Setup Node.js
|
||||
@ -80,13 +115,40 @@ jobs:
|
||||
run: curl -sSL https://get.livekit.io/cli | bash
|
||||
- name: Setup OpenVidu Local Deployment
|
||||
uses: OpenVidu/actions/start-openvidu-local-deployment@main
|
||||
with:
|
||||
ref-openvidu-local-deployment: development
|
||||
pre_startup_commands: |
|
||||
cat <<'BASH' > pre_startup_commands.sh
|
||||
#!/bin/bash
|
||||
if [[ "${{ matrix['storage-provider'] }}" == "abs" ]]; then
|
||||
echo "Using Azure storage provider"
|
||||
yq e -i '
|
||||
del(.storage.s3) |
|
||||
.storage.azure = {
|
||||
"account_name": "${{ vars.MEET_AZURE_ACCOUNT_NAME }}",
|
||||
"account_key": "${{ secrets.MEET_AZURE_ACCOUNT_KEY }}",
|
||||
"container_name": "openvidu-appdata-recordings"
|
||||
}
|
||||
' egress.yaml
|
||||
fi
|
||||
BASH
|
||||
chmod +x pre_startup_commands.sh && ./pre_startup_commands.sh
|
||||
- name: Setup OpenVidu Meet
|
||||
uses: OpenVidu/actions/start-openvidu-meet@main
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-recordings"
|
||||
- name: Run tests
|
||||
run: |
|
||||
cd backend
|
||||
npm run test:integration-recordings
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-recordings"
|
||||
JEST_JUNIT_OUTPUT_DIR: './reports/'
|
||||
- name: Publish Test Report
|
||||
uses: mikepenz/action-junit-report@v4
|
||||
@ -102,6 +164,10 @@ jobs:
|
||||
test-webhooks:
|
||||
name: Webhook Tests
|
||||
runs-on: ov-actions-runner
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
storage-provider: [s3, abs]
|
||||
steps:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
@ -111,13 +177,40 @@ jobs:
|
||||
run: curl -sSL https://get.livekit.io/cli | bash
|
||||
- name: Setup OpenVidu Local Deployment
|
||||
uses: OpenVidu/actions/start-openvidu-local-deployment@main
|
||||
with:
|
||||
ref-openvidu-local-deployment: development
|
||||
pre_startup_commands: |
|
||||
cat <<'BASH' > pre_startup_commands.sh
|
||||
#!/bin/bash
|
||||
if [[ "${{ matrix['storage-provider'] }}" == "abs" ]]; then
|
||||
echo "Using Azure storage provider"
|
||||
yq e -i '
|
||||
del(.storage.s3) |
|
||||
.storage.azure = {
|
||||
"account_name": "${{ vars.MEET_AZURE_ACCOUNT_NAME }}",
|
||||
"account_key": "${{ secrets.MEET_AZURE_ACCOUNT_KEY }}",
|
||||
"container_name": "openvidu-appdata-webhooks"
|
||||
}
|
||||
' egress.yaml
|
||||
fi
|
||||
BASH
|
||||
chmod +x pre_startup_commands.sh && ./pre_startup_commands.sh
|
||||
- name: Setup OpenVidu Meet
|
||||
uses: OpenVidu/actions/start-openvidu-meet@main
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-webhooks"
|
||||
- name: Run tests
|
||||
run: |
|
||||
cd backend
|
||||
npm run test:integration-webhooks
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-webhooks"
|
||||
JEST_JUNIT_OUTPUT_DIR: './reports/'
|
||||
- name: Publish Test Report
|
||||
uses: mikepenz/action-junit-report@v4
|
||||
@ -133,6 +226,10 @@ jobs:
|
||||
test-security:
|
||||
name: Security API Tests
|
||||
runs-on: ov-actions-runner
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
storage-provider: [s3, abs]
|
||||
steps:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
@ -142,13 +239,40 @@ jobs:
|
||||
run: curl -sSL https://get.livekit.io/cli | bash
|
||||
- name: Setup OpenVidu Local Deployment
|
||||
uses: OpenVidu/actions/start-openvidu-local-deployment@main
|
||||
with:
|
||||
ref-openvidu-local-deployment: development
|
||||
pre_startup_commands: |
|
||||
cat <<'BASH' > pre_startup_commands.sh
|
||||
#!/bin/bash
|
||||
if [[ "${{ matrix['storage-provider'] }}" == "abs" ]]; then
|
||||
echo "Using Azure storage provider"
|
||||
yq e -i '
|
||||
del(.storage.s3) |
|
||||
.storage.azure = {
|
||||
"account_name": "${{ vars.MEET_AZURE_ACCOUNT_NAME }}",
|
||||
"account_key": "${{ secrets.MEET_AZURE_ACCOUNT_KEY }}",
|
||||
"container_name": "openvidu-appdata-security"
|
||||
}
|
||||
' egress.yaml
|
||||
fi
|
||||
BASH
|
||||
chmod +x pre_startup_commands.sh && ./pre_startup_commands.sh
|
||||
- name: Setup OpenVidu Meet
|
||||
uses: OpenVidu/actions/start-openvidu-meet@main
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-security"
|
||||
- name: Run tests
|
||||
run: |
|
||||
cd backend
|
||||
npm run test:integration-security
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-security"
|
||||
JEST_JUNIT_OUTPUT_DIR: './reports/'
|
||||
- name: Publish Test Report
|
||||
uses: mikepenz/action-junit-report@v4
|
||||
@ -164,6 +288,10 @@ jobs:
|
||||
test-global-preferences:
|
||||
name: Global Preferences API Tests
|
||||
runs-on: ov-actions-runner
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
storage-provider: [s3, abs]
|
||||
steps:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
@ -173,13 +301,40 @@ jobs:
|
||||
run: curl -sSL https://get.livekit.io/cli | bash
|
||||
- name: Setup OpenVidu Local Deployment
|
||||
uses: OpenVidu/actions/start-openvidu-local-deployment@main
|
||||
with:
|
||||
ref-openvidu-local-deployment: development
|
||||
pre_startup_commands: |
|
||||
cat <<'BASH' > pre_startup_commands.sh
|
||||
#!/bin/bash
|
||||
if [[ "${{ matrix['storage-provider'] }}" == "abs" ]]; then
|
||||
echo "Using Azure storage provider"
|
||||
yq e -i '
|
||||
del(.storage.s3) |
|
||||
.storage.azure = {
|
||||
"account_name": "${{ vars.MEET_AZURE_ACCOUNT_NAME }}",
|
||||
"account_key": "${{ secrets.MEET_AZURE_ACCOUNT_KEY }}",
|
||||
"container_name": "openvidu-appdata-global-preferences"
|
||||
}
|
||||
' egress.yaml
|
||||
fi
|
||||
BASH
|
||||
chmod +x pre_startup_commands.sh && ./pre_startup_commands.sh
|
||||
- name: Setup OpenVidu Meet
|
||||
uses: OpenVidu/actions/start-openvidu-meet@main
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-global-preferences"
|
||||
- name: Run tests
|
||||
run: |
|
||||
cd backend
|
||||
npm run test:integration-global-preferences
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-global-preferences"
|
||||
JEST_JUNIT_OUTPUT_DIR: './reports/'
|
||||
- name: Publish Test Report
|
||||
uses: mikepenz/action-junit-report@v4
|
||||
@ -195,6 +350,10 @@ jobs:
|
||||
test-participants:
|
||||
name: Participants API Tests
|
||||
runs-on: ov-actions-runner
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
storage-provider: [s3, abs]
|
||||
steps:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
@ -204,13 +363,40 @@ jobs:
|
||||
run: curl -sSL https://get.livekit.io/cli | bash
|
||||
- name: Setup OpenVidu Local Deployment
|
||||
uses: OpenVidu/actions/start-openvidu-local-deployment@main
|
||||
with:
|
||||
ref-openvidu-local-deployment: development
|
||||
pre_startup_commands: |
|
||||
cat <<'BASH' > pre_startup_commands.sh
|
||||
#!/bin/bash
|
||||
if [[ "${{ matrix['storage-provider'] }}" == "abs" ]]; then
|
||||
echo "Using Azure storage provider"
|
||||
yq e -i '
|
||||
del(.storage.s3) |
|
||||
.storage.azure = {
|
||||
"account_name": "${{ vars.MEET_AZURE_ACCOUNT_NAME }}",
|
||||
"account_key": "${{ secrets.MEET_AZURE_ACCOUNT_KEY }}",
|
||||
"container_name": "openvidu-appdata-participants"
|
||||
}
|
||||
' egress.yaml
|
||||
fi
|
||||
BASH
|
||||
chmod +x pre_startup_commands.sh && ./pre_startup_commands.sh
|
||||
- name: Setup OpenVidu Meet
|
||||
uses: OpenVidu/actions/start-openvidu-meet@main
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-participants"
|
||||
- name: Run tests
|
||||
run: |
|
||||
cd backend
|
||||
npm run test:integration-participants
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-participants"
|
||||
JEST_JUNIT_OUTPUT_DIR: './reports/'
|
||||
- name: Publish Test Report
|
||||
uses: mikepenz/action-junit-report@v4
|
||||
@ -226,6 +412,10 @@ jobs:
|
||||
test-meetings:
|
||||
name: Meetings API Tests
|
||||
runs-on: ov-actions-runner
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
storage-provider: [s3, abs]
|
||||
steps:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
@ -235,13 +425,40 @@ jobs:
|
||||
run: curl -sSL https://get.livekit.io/cli | bash
|
||||
- name: Setup OpenVidu Local Deployment
|
||||
uses: OpenVidu/actions/start-openvidu-local-deployment@main
|
||||
with:
|
||||
ref-openvidu-local-deployment: development
|
||||
pre_startup_commands: |
|
||||
cat <<'BASH' > pre_startup_commands.sh
|
||||
#!/bin/bash
|
||||
if [[ "${{ matrix['storage-provider'] }}" == "abs" ]]; then
|
||||
echo "Using Azure storage provider"
|
||||
yq e -i '
|
||||
del(.storage.s3) |
|
||||
.storage.azure = {
|
||||
"account_name": "${{ vars.MEET_AZURE_ACCOUNT_NAME }}",
|
||||
"account_key": "${{ secrets.MEET_AZURE_ACCOUNT_KEY }}",
|
||||
"container_name": "openvidu-appdata-meetings"
|
||||
}
|
||||
' egress.yaml
|
||||
fi
|
||||
BASH
|
||||
chmod +x pre_startup_commands.sh && ./pre_startup_commands.sh
|
||||
- name: Setup OpenVidu Meet
|
||||
uses: OpenVidu/actions/start-openvidu-meet@main
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-meetings"
|
||||
- name: Run tests
|
||||
run: |
|
||||
cd backend
|
||||
npm run test:integration-meetings
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-meetings"
|
||||
JEST_JUNIT_OUTPUT_DIR: './reports/'
|
||||
- name: Publish Test Report
|
||||
uses: mikepenz/action-junit-report@v4
|
||||
@ -257,6 +474,10 @@ jobs:
|
||||
test-users:
|
||||
name: Users API Tests
|
||||
runs-on: ov-actions-runner
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
storage-provider: [s3, abs]
|
||||
steps:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
@ -266,13 +487,40 @@ jobs:
|
||||
run: curl -sSL https://get.livekit.io/cli | bash
|
||||
- name: Setup OpenVidu Local Deployment
|
||||
uses: OpenVidu/actions/start-openvidu-local-deployment@main
|
||||
with:
|
||||
ref-openvidu-local-deployment: development
|
||||
pre_startup_commands: |
|
||||
cat <<'BASH' > pre_startup_commands.sh
|
||||
#!/bin/bash
|
||||
if [[ "${{ matrix['storage-provider'] }}" == "abs" ]]; then
|
||||
echo "Using Azure storage provider"
|
||||
yq e -i '
|
||||
del(.storage.s3) |
|
||||
.storage.azure = {
|
||||
"account_name": "${{ vars.MEET_AZURE_ACCOUNT_NAME }}",
|
||||
"account_key": "${{ secrets.MEET_AZURE_ACCOUNT_KEY }}",
|
||||
"container_name": "openvidu-appdata-users"
|
||||
}
|
||||
' egress.yaml
|
||||
fi
|
||||
BASH
|
||||
chmod +x pre_startup_commands.sh && ./pre_startup_commands.sh
|
||||
- name: Setup OpenVidu Meet
|
||||
uses: OpenVidu/actions/start-openvidu-meet@main
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-users"
|
||||
- name: Run tests
|
||||
run: |
|
||||
cd backend
|
||||
npm run test:integration-users
|
||||
env:
|
||||
MEET_PREFERENCES_STORAGE_MODE: ${{ matrix.storage-provider }}
|
||||
MEET_AZURE_ACCOUNT_NAME: ${{ vars.MEET_AZURE_ACCOUNT_NAME }}
|
||||
MEET_AZURE_ACCOUNT_KEY: ${{ secrets.MEET_AZURE_ACCOUNT_KEY }}
|
||||
MEET_AZURE_CONTAINER_NAME: "openvidu-appdata-users"
|
||||
JEST_JUNIT_OUTPUT_DIR: './reports/'
|
||||
- name: Publish Test Report
|
||||
uses: mikepenz/action-junit-report@v4
|
||||
|
||||
backend/package-lock.json (generated): 366 lines changed
@ -10,6 +10,7 @@
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-s3": "3.673.0",
|
||||
"@azure/storage-blob": "^12.27.0",
|
||||
"@sesamecare-oss/redlock": "1.4.0",
|
||||
"bcrypt": "5.1.1",
|
||||
"chalk": "5.4.1",
|
||||
@ -1096,6 +1097,215 @@
|
||||
"node": ">=16.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/abort-controller": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz",
|
||||
"integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-auth": {
|
||||
"version": "1.9.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.9.0.tgz",
|
||||
"integrity": "sha512-FPwHpZywuyasDSLMqJ6fhbOK3TqUdviZNF8OqRGA4W5Ewib2lEEZ+pBsYcBa88B2NGO/SEnYPGhyBqNlE8ilSw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@azure/abort-controller": "^2.0.0",
|
||||
"@azure/core-util": "^1.11.0",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-client": {
|
||||
"version": "1.9.4",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-client/-/core-client-1.9.4.tgz",
|
||||
"integrity": "sha512-f7IxTD15Qdux30s2qFARH+JxgwxWLG2Rlr4oSkPGuLWm+1p5y1+C04XGLA0vmX6EtqfutmjvpNmAfgwVIS5hpw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@azure/abort-controller": "^2.0.0",
|
||||
"@azure/core-auth": "^1.4.0",
|
||||
"@azure/core-rest-pipeline": "^1.20.0",
|
||||
"@azure/core-tracing": "^1.0.0",
|
||||
"@azure/core-util": "^1.6.1",
|
||||
"@azure/logger": "^1.0.0",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-http-compat": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-http-compat/-/core-http-compat-2.3.0.tgz",
|
||||
"integrity": "sha512-qLQujmUypBBG0gxHd0j6/Jdmul6ttl24c8WGiLXIk7IHXdBlfoBqW27hyz3Xn6xbfdyVSarl1Ttbk0AwnZBYCw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@azure/abort-controller": "^2.0.0",
|
||||
"@azure/core-client": "^1.3.0",
|
||||
"@azure/core-rest-pipeline": "^1.20.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-lro": {
|
||||
"version": "2.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.2.tgz",
|
||||
"integrity": "sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@azure/abort-controller": "^2.0.0",
|
||||
"@azure/core-util": "^1.2.0",
|
||||
"@azure/logger": "^1.0.0",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-paging": {
|
||||
"version": "1.6.2",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.2.tgz",
|
||||
"integrity": "sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-rest-pipeline": {
|
||||
"version": "1.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-rest-pipeline/-/core-rest-pipeline-1.21.0.tgz",
|
||||
"integrity": "sha512-a4MBwe/5WKbq9MIxikzgxLBbruC5qlkFYlBdI7Ev50Y7ib5Vo/Jvt5jnJo7NaWeJ908LCHL0S1Us4UMf1VoTfg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@azure/abort-controller": "^2.0.0",
|
||||
"@azure/core-auth": "^1.8.0",
|
||||
"@azure/core-tracing": "^1.0.1",
|
||||
"@azure/core-util": "^1.11.0",
|
||||
"@azure/logger": "^1.0.0",
|
||||
"@typespec/ts-http-runtime": "^0.2.3",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-tracing": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.2.0.tgz",
|
||||
"integrity": "sha512-UKTiEJPkWcESPYJz3X5uKRYyOcJD+4nYph+KpfdPRnQJVrZfk0KJgdnaAWKfhsBBtAf/D58Az4AvCJEmWgIBAg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-util": {
|
||||
"version": "1.12.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.12.0.tgz",
|
||||
"integrity": "sha512-13IyjTQgABPARvG90+N2dXpC+hwp466XCdQXPCRlbWHgd3SJd5Q1VvaBGv6k1BIa4MQm6hAF1UBU1m8QUxV8sQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@azure/abort-controller": "^2.0.0",
|
||||
"@typespec/ts-http-runtime": "^0.2.2",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-xml": {
|
||||
"version": "1.4.5",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-xml/-/core-xml-1.4.5.tgz",
|
||||
"integrity": "sha512-gT4H8mTaSXRz7eGTuQyq1aIJnJqeXzpOe9Ay7Z3FrCouer14CbV3VzjnJrNrQfbBpGBLO9oy8BmrY75A0p53cA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fast-xml-parser": "^5.0.7",
|
||||
"tslib": "^2.8.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-xml/node_modules/fast-xml-parser": {
|
||||
"version": "5.2.5",
|
||||
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.2.5.tgz",
|
||||
"integrity": "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/NaturalIntelligence"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"strnum": "^2.1.0"
|
||||
},
|
||||
"bin": {
|
||||
"fxparser": "src/cli/cli.js"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-xml/node_modules/strnum": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/strnum/-/strnum-2.1.1.tgz",
|
||||
"integrity": "sha512-7ZvoFTiCnGxBtDqJ//Cu6fWtZtc7Y3x+QOirG15wztbdngGSkht27o2pyGWrVy0b4WAy3jbKmnoK6g5VlVNUUw==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/NaturalIntelligence"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@azure/logger": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.2.0.tgz",
|
||||
"integrity": "sha512-0hKEzLhpw+ZTAfNJyRrn6s+V0nDWzXk9OjBr2TiGIu0OfMr5s2V4FpKLTAK3Ca5r5OKLbf4hkOGDPyiRjie/jA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typespec/ts-http-runtime": "^0.2.2",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/storage-blob": {
|
||||
"version": "12.27.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.27.0.tgz",
|
||||
"integrity": "sha512-IQjj9RIzAKatmNca3D6bT0qJ+Pkox1WZGOg2esJF2YLHb45pQKOwGPIAV+w3rfgkj7zV3RMxpn/c6iftzSOZJQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@azure/abort-controller": "^2.1.2",
|
||||
"@azure/core-auth": "^1.4.0",
|
||||
"@azure/core-client": "^1.6.2",
|
||||
"@azure/core-http-compat": "^2.0.0",
|
||||
"@azure/core-lro": "^2.2.0",
|
||||
"@azure/core-paging": "^1.1.1",
|
||||
"@azure/core-rest-pipeline": "^1.10.1",
|
||||
"@azure/core-tracing": "^1.1.2",
|
||||
"@azure/core-util": "^1.6.1",
|
||||
"@azure/core-xml": "^1.4.3",
|
||||
"@azure/logger": "^1.0.0",
|
||||
"events": "^3.0.0",
|
||||
"tslib": "^2.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/code-frame": {
|
||||
"version": "7.27.1",
|
||||
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
|
||||
@ -3046,6 +3256,31 @@
|
||||
"node-pre-gyp": "bin/node-pre-gyp"
|
||||
}
|
||||
},
|
||||
"node_modules/@mapbox/node-pre-gyp/node_modules/agent-base": {
|
||||
"version": "6.0.2",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
||||
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"debug": "4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@mapbox/node-pre-gyp/node_modules/https-proxy-agent": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
|
||||
"integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"agent-base": "6",
|
||||
"debug": "4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/@nestjs/axios": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@nestjs/axios/-/axios-4.0.0.tgz",
|
||||
@ -4764,6 +4999,20 @@
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
}
|
||||
},
|
||||
"node_modules/@typespec/ts-http-runtime": {
|
||||
"version": "0.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@typespec/ts-http-runtime/-/ts-http-runtime-0.2.3.tgz",
|
||||
"integrity": "sha512-oRhjSzcVjX8ExyaF8hC0zzTqxlVuRlgMHL/Bh4w3xB9+wjbm0FpXylVU/lBrn+kgphwYTrOk3tp+AVShGmlYCg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"http-proxy-agent": "^7.0.0",
|
||||
"https-proxy-agent": "^7.0.0",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@zodios/core": {
|
||||
"version": "10.9.6",
|
||||
"resolved": "https://registry.npmjs.org/@zodios/core/-/core-10.9.6.tgz",
|
||||
@ -4831,15 +5080,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/agent-base": {
|
||||
"version": "6.0.2",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
||||
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
||||
"version": "7.1.3",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
|
||||
"integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"debug": "4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6.0.0"
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/ajv": {
|
||||
@ -5377,9 +5623,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0",
|
||||
@ -7043,6 +7289,15 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/events": {
|
||||
"version": "3.3.0",
|
||||
"resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz",
|
||||
"integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.8.x"
|
||||
}
|
||||
},
|
||||
"node_modules/execa": {
|
||||
"version": "5.1.1",
|
||||
"resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz",
|
||||
@ -7472,9 +7727,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/filelist/node_modules/brace-expansion": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
|
||||
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
|
||||
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@ -7868,9 +8123,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/glob/node_modules/brace-expansion": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
|
||||
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
|
||||
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@ -8061,7 +8316,6 @@
|
||||
"version": "7.0.2",
|
||||
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz",
|
||||
"integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"agent-base": "^7.1.0",
|
||||
@ -8071,27 +8325,17 @@
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/http-proxy-agent/node_modules/agent-base": {
|
||||
"version": "7.1.3",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
|
||||
"integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/https-proxy-agent": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
|
||||
"integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
|
||||
"version": "7.0.6",
|
||||
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
|
||||
"integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"agent-base": "6",
|
||||
"agent-base": "^7.1.2",
|
||||
"debug": "4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/human-signals": {
|
||||
@ -11567,30 +11811,6 @@
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/pac-proxy-agent/node_modules/agent-base": {
|
||||
"version": "7.1.3",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
|
||||
"integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/pac-proxy-agent/node_modules/https-proxy-agent": {
|
||||
"version": "7.0.6",
|
||||
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
|
||||
"integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"agent-base": "^7.1.2",
|
||||
"debug": "4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/pac-resolver": {
|
||||
"version": "7.0.1",
|
||||
"resolved": "https://registry.npmjs.org/pac-resolver/-/pac-resolver-7.0.1.tgz",
|
||||
@ -11968,30 +12188,6 @@
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/proxy-agent/node_modules/agent-base": {
|
||||
"version": "7.1.3",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
|
||||
"integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/proxy-agent/node_modules/https-proxy-agent": {
|
||||
"version": "7.0.6",
|
||||
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
|
||||
"integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"agent-base": "^7.1.2",
|
||||
"debug": "4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/proxy-agent/node_modules/lru-cache": {
|
||||
"version": "7.18.3",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
|
||||
@ -12674,16 +12870,6 @@
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/socks-proxy-agent/node_modules/agent-base": {
|
||||
"version": "7.1.3",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
|
||||
"integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
|
||||
@ -53,6 +53,7 @@
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-s3": "3.673.0",
|
||||
"@sesamecare-oss/redlock": "1.4.0",
|
||||
"@azure/storage-blob": "^12.27.0",
|
||||
"bcrypt": "5.1.1",
|
||||
"chalk": "5.4.1",
|
||||
"cookie-parser": "1.4.7",
|
||||
@ -106,4 +107,4 @@
|
||||
"outputDirectory": "test-results",
|
||||
"outputName": "junit.xml"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,8 @@
import { Container } from 'inversify';
import { MEET_PREFERENCES_STORAGE_MODE } from '../environment.js';
import {
	ABSService,
	ABSStorageProvider,
	AuthService,
	LiveKitService,
	LivekitWebhookService,
@@ -11,6 +14,7 @@ import {
	RecordingService,
	RedisService,
	RoomService,
	S3KeyBuilder,
	S3Service,
	S3StorageProvider,
	StorageFactory,
@@ -21,16 +25,12 @@ import {
	TokenService,
	UserService
} from '../services/index.js';
import { MEET_PREFERENCES_STORAGE_MODE } from '../environment.js';
import { S3KeyBuilder } from '../services/storage/providers/s3/s3-storage-key.builder.js';

export const container: Container = new Container();

export const STORAGE_TYPES = {
	StorageProvider: Symbol.for('StorageProvider'),
	KeyBuilder: Symbol.for('KeyBuilder'),
	S3StorageProvider: Symbol.for('S3StorageProvider'),
	S3KeyBuilder: Symbol.for('S3KeyBuilder')
	KeyBuilder: Symbol.for('KeyBuilder')
};

/**
@@ -50,8 +50,6 @@ export const registerDependencies = () => {
	container.bind(TaskSchedulerService).toSelf().inSingletonScope();

	configureStorage(MEET_PREFERENCES_STORAGE_MODE);
	container.bind(S3Service).toSelf().inSingletonScope();
	container.bind(S3StorageProvider).toSelf().inSingletonScope();
	container.bind(StorageFactory).toSelf().inSingletonScope();
	container.bind(MeetStorageService).toSelf().inSingletonScope();

@@ -75,6 +73,14 @@ const configureStorage = (storageMode: string) => {
		case 's3':
			container.bind<StorageProvider>(STORAGE_TYPES.StorageProvider).to(S3StorageProvider).inSingletonScope();
			container.bind<StorageKeyBuilder>(STORAGE_TYPES.KeyBuilder).to(S3KeyBuilder).inSingletonScope();
			container.bind(S3Service).toSelf().inSingletonScope();
			container.bind(S3StorageProvider).toSelf().inSingletonScope();
			break;
		case 'abs':
			container.bind<StorageProvider>(STORAGE_TYPES.StorageProvider).to(ABSStorageProvider).inSingletonScope();
			container.bind<StorageKeyBuilder>(STORAGE_TYPES.KeyBuilder).to(S3KeyBuilder).inSingletonScope();
			container.bind(ABSService).toSelf().inSingletonScope();
			container.bind(ABSStorageProvider).toSelf().inSingletonScope();
			break;
	}
};
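With this wiring, callers do not instantiate a concrete storage class; they resolve whichever provider the configured mode selected through the container symbols. A minimal usage sketch (the import path of this config module and the object key are assumptions for illustration, not part of the commit):

import { container, registerDependencies, STORAGE_TYPES } from './dependency-injector.config.js'; // assumed path
import { StorageProvider } from '../services/index.js';

registerDependencies();

// Resolves S3StorageProvider or ABSStorageProvider depending on MEET_PREFERENCES_STORAGE_MODE
const storage = container.get<StorageProvider>(STORAGE_TYPES.StorageProvider);
const found = await storage.exists('global-preferences.json'); // illustrative key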
@@ -42,7 +42,7 @@ export const {
	LIVEKIT_API_KEY = 'devkey',
	LIVEKIT_API_SECRET = 'secret',

	MEET_PREFERENCES_STORAGE_MODE = 's3',
	MEET_PREFERENCES_STORAGE_MODE = 's3', // Options: 's3', 'abs'

	// S3 configuration
	MEET_S3_BUCKET = 'openvidu-appdata',
@@ -53,6 +53,12 @@ export const {
	MEET_AWS_REGION = 'us-east-1',
	MEET_S3_WITH_PATH_STYLE_ACCESS = 'true',

	// Azure Blob storage configuration
	MEET_AZURE_CONTAINER_NAME = 'openvidu-appdata',
	MEET_AZURE_SUBCONATAINER_NAME = 'openvidu-meet',
	MEET_AZURE_ACCOUNT_NAME = '',
	MEET_AZURE_ACCOUNT_KEY = '',

	// Redis configuration
	MEET_REDIS_HOST: REDIS_HOST = 'localhost',
	MEET_REDIS_PORT: REDIS_PORT = 6379,
@@ -114,15 +120,26 @@ export const logEnvVars = () => {
	console.log('LIVEKIT API SECRET: ', credential('****' + LIVEKIT_API_SECRET.slice(-3)));
	console.log('LIVEKIT API KEY: ', credential('****' + LIVEKIT_API_KEY.slice(-3)));
	console.log('---------------------------------------------------------');
	console.log('S3 Configuration');
	console.log('---------------------------------------------------------');
	console.log('MEET S3 BUCKET:', text(MEET_S3_BUCKET));
	console.log('MEET S3 SERVICE ENDPOINT:', text(MEET_S3_SERVICE_ENDPOINT));
	console.log('MEET S3 ACCESS KEY:', credential('****' + MEET_S3_ACCESS_KEY.slice(-3)));
	console.log('MEET S3 SECRET KEY:', credential('****' + MEET_S3_SECRET_KEY.slice(-3)));
	console.log('MEET AWS REGION:', text(MEET_AWS_REGION));
	console.log('MEET S3 WITH PATH STYLE ACCESS:', text(MEET_S3_WITH_PATH_STYLE_ACCESS));
	console.log('---------------------------------------------------------');

	if (MEET_PREFERENCES_STORAGE_MODE === 's3') {
		console.log('S3 Configuration');
		console.log('---------------------------------------------------------');
		console.log('MEET S3 BUCKET:', text(MEET_S3_BUCKET));
		console.log('MEET S3 SERVICE ENDPOINT:', text(MEET_S3_SERVICE_ENDPOINT));
		console.log('MEET S3 ACCESS KEY:', credential('****' + MEET_S3_ACCESS_KEY.slice(-3)));
		console.log('MEET S3 SECRET KEY:', credential('****' + MEET_S3_SECRET_KEY.slice(-3)));
		console.log('MEET AWS REGION:', text(MEET_AWS_REGION));
		console.log('MEET S3 WITH PATH STYLE ACCESS:', text(MEET_S3_WITH_PATH_STYLE_ACCESS));
		console.log('---------------------------------------------------------');
	} else if (MEET_PREFERENCES_STORAGE_MODE === 'abs') {
		console.log('Azure Blob Storage Configuration');
		console.log('---------------------------------------------------------');
		console.log('MEET AZURE ACCOUNT NAME:', text(MEET_AZURE_ACCOUNT_NAME));
		console.log('MEET AZURE ACCOUNT KEY:', credential('****' + MEET_AZURE_ACCOUNT_KEY.slice(-3)));
		console.log('MEET AZURE CONTAINER NAME:', text(MEET_AZURE_CONTAINER_NAME));
		console.log('---------------------------------------------------------');
	}

	console.log('Redis Configuration');
	console.log('---------------------------------------------------------');
	console.log('REDIS HOST:', text(REDIS_HOST));
@@ -58,6 +58,10 @@ export const errorS3NotAvailable = (error: any): OpenViduMeetError => {
	return new OpenViduMeetError('S3 Error', `S3 is not available ${error}`, 503);
};

export const errorAzureNotAvailable = (error: any): OpenViduMeetError => {
	return new OpenViduMeetError('ABS Error', `Azure Blob Storage is not available ${error}`, 503);
};

// Auth errors

export const errorInvalidCredentials = (): OpenViduMeetError => {
@@ -4,7 +4,6 @@ export * from './system-event.service.js';
export * from './mutex.service.js';
export * from './task-scheduler.service.js';

export * from './storage/providers/s3/s3.service.js';
export * from './storage/index.js';

export * from './token.service.js';

@@ -2,4 +2,8 @@ export * from './storage.interface.js';
export * from './storage.factory.js';
export * from './storage.service.js';

export * from './providers/s3/s3.service.js';
export * from './providers/s3/s3-storage-key.builder.js';
export * from './providers/s3/s3-storage.provider.js';
export * from './providers/abs/abs.service.js';
export * from './providers/abs/abs-storage.provider.js';
@ -0,0 +1,151 @@
|
||||
import { inject, injectable } from 'inversify';
|
||||
import { Readable } from 'stream';
|
||||
import { ABSService, LoggerService } from '../../../index.js';
|
||||
import { StorageProvider } from '../../storage.interface.js';
|
||||
|
||||
/**
|
||||
* Basic Azure Blob Storage provider that implements only primitive storage operations.
|
||||
*/
|
||||
@injectable()
|
||||
export class ABSStorageProvider implements StorageProvider {
|
||||
constructor(
|
||||
@inject(LoggerService) protected logger: LoggerService,
|
||||
@inject(ABSService) protected azureBlobService: ABSService
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Retrieves an object from ABS as a JSON object.
|
||||
*/
|
||||
async getObject<T = Record<string, unknown>>(key: string): Promise<T | null> {
|
||||
try {
|
||||
this.logger.debug(`Getting object from ABS: ${key}`);
|
||||
const result = await this.azureBlobService.getObjectAsJson(key);
|
||||
return result as T;
|
||||
} catch (error) {
|
||||
this.logger.debug(`Object not found in ABS: ${key}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores an object in ABS as JSON.
|
||||
*/
|
||||
async putObject<T = Record<string, unknown>>(key: string, data: T): Promise<void> {
|
||||
try {
|
||||
this.logger.debug(`Storing object in ABS: ${key}`);
|
||||
await this.azureBlobService.saveObject(key, data as Record<string, unknown>);
|
||||
this.logger.verbose(`Successfully stored object in ABS: ${key}`);
|
||||
} catch (error) {
|
||||
this.logger.error(`Error storing object in ABS ${key}: ${error}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a single object from ABS.
|
||||
*/
|
||||
async deleteObject(key: string): Promise<void> {
|
||||
try {
|
||||
this.logger.debug(`Deleting object from ABS: ${key}`);
|
||||
await this.azureBlobService.deleteObjects([key]);
|
||||
this.logger.verbose(`Successfully deleted object from ABS: ${key}`);
|
||||
} catch (error) {
|
||||
this.logger.error(`Error deleting object from ABS ${key}: ${error}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes multiple objects from ABS.
|
||||
*/
|
||||
async deleteObjects(keys: string[]): Promise<void> {
|
||||
try {
|
||||
this.logger.debug(`Deleting ${keys.length} objects from ABS`);
|
||||
await this.azureBlobService.deleteObjects(keys);
|
||||
this.logger.verbose(`Successfully deleted ${keys.length} objects from ABS`);
|
||||
} catch (error) {
|
||||
this.logger.error(`Error deleting objects from ABS: ${error}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if an object exists in ABS.
|
||||
*/
|
||||
async exists(key: string): Promise<boolean> {
|
||||
try {
|
||||
this.logger.debug(`Checking if object exists in ABS: ${key}`);
|
||||
return await this.azureBlobService.exists(key);
|
||||
} catch (error) {
|
||||
this.logger.debug(`Error checking object existence in ABS ${key}: ${error}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists objects in ABS with a given prefix.
|
||||
*/
|
||||
async listObjects(
|
||||
prefix: string,
|
||||
maxItems?: number,
|
||||
continuationToken?: string
|
||||
): Promise<{
|
||||
Contents?: Array<{
|
||||
Key?: string;
|
||||
LastModified?: Date;
|
||||
Size?: number;
|
||||
ETag?: string;
|
||||
}>;
|
||||
IsTruncated?: boolean;
|
||||
NextContinuationToken?: string;
|
||||
}> {
|
||||
try {
|
||||
this.logger.debug(`Listing objects in ABS with prefix: ${prefix}`);
|
||||
const response = await this.azureBlobService.listObjectsPaginated(prefix, maxItems, continuationToken);
|
||||
const contents = response.items.map((blob) => ({
|
||||
Key: blob.name,
|
||||
LastModified: blob.properties.lastModified,
|
||||
Size: blob.properties.contentLength,
|
||||
ETag: blob.properties.etag
|
||||
})) as object[];
|
||||
return {
|
||||
Contents: contents,
|
||||
IsTruncated: response.isTruncated,
|
||||
NextContinuationToken: response.continuationToken
|
||||
};
|
||||
} catch (error) {
|
||||
this.logger.error(`Error listing objects in ABS with prefix ${prefix}: ${error}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves metadata headers for an object in ABS.
|
||||
*/
|
||||
async getObjectHeaders(key: string): Promise<{ contentLength?: number; contentType?: string }> {
|
||||
try {
|
||||
this.logger.debug(`Getting object headers from ABS: ${key}`);
|
||||
const data = await this.azureBlobService.getObjectHeaders(key);
|
||||
return {
|
||||
contentLength: data.ContentLength,
|
||||
contentType: data.ContentType
|
||||
};
|
||||
} catch (error) {
|
||||
this.logger.error(`Error fetching object headers from ABS ${key}: ${error}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves an object from ABS as a readable stream.
|
||||
*/
|
||||
async getObjectAsStream(key: string, range?: { start: number; end: number }): Promise<Readable> {
|
||||
try {
|
||||
this.logger.debug(`Getting object stream from ABS: ${key}`);
|
||||
return await this.azureBlobService.getObjectAsStream(key, range);
|
||||
} catch (error) {
|
||||
this.logger.error(`Error fetching object stream from ABS ${key}: ${error}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
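Because ABSStorageProvider returns the same S3-style listing shape (Contents, IsTruncated, NextContinuationToken), existing callers can page through Azure blobs without changes. A minimal sketch of a paging loop, given a provider instance storageProvider obtained for example from the DI container (the 'recordings/' prefix is only illustrative):

let token: string | undefined = undefined;
do {
	const page = await storageProvider.listObjects('recordings/', 50, token);
	for (const item of page.Contents ?? []) {
		console.log(item.Key, item.Size);
	}
	token = page.IsTruncated ? page.NextContinuationToken : undefined;
} while (token);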
backend/src/services/storage/providers/abs/abs.service.ts (new file): 290 lines
@ -0,0 +1,290 @@
|
||||
import {
|
||||
BlobItem,
|
||||
BlobServiceClient,
|
||||
BlockBlobClient,
|
||||
BlockBlobUploadResponse,
|
||||
ContainerClient
|
||||
} from '@azure/storage-blob';
|
||||
import { inject, injectable } from 'inversify';
|
||||
import { Readable } from 'stream';
|
||||
import {
|
||||
MEET_AZURE_ACCOUNT_KEY,
|
||||
MEET_AZURE_ACCOUNT_NAME,
|
||||
MEET_AZURE_CONTAINER_NAME,
|
||||
MEET_AZURE_SUBCONATAINER_NAME
|
||||
} from '../../../../environment.js';
|
||||
import { errorAzureNotAvailable, internalError } from '../../../../models/error.model.js';
|
||||
import { LoggerService } from '../../../index.js';
|
||||
|
||||
@injectable()
|
||||
export class ABSService {
|
||||
private blobServiceClient: BlobServiceClient;
|
||||
private containerClient: ContainerClient;
|
||||
|
||||
constructor(@inject(LoggerService) protected logger: LoggerService) {
|
||||
if (!MEET_AZURE_ACCOUNT_NAME || !MEET_AZURE_ACCOUNT_KEY || !MEET_AZURE_CONTAINER_NAME) {
|
||||
throw new Error('Azure Blob Storage configuration is incomplete');
|
||||
}
|
||||
|
||||
const AZURE_STORAGE_CONNECTION_STRING = `DefaultEndpointsProtocol=https;AccountName=${MEET_AZURE_ACCOUNT_NAME};AccountKey=${MEET_AZURE_ACCOUNT_KEY};EndpointSuffix=core.windows.net`;
|
||||
this.blobServiceClient = BlobServiceClient.fromConnectionString(AZURE_STORAGE_CONNECTION_STRING);
|
||||
this.containerClient = this.blobServiceClient.getContainerClient(MEET_AZURE_CONTAINER_NAME);
|
||||
|
||||
this.logger.debug('Azure Client initialized');
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a file exists in the ABS container.
|
||||
*
|
||||
* @param blobName - The name of the blob to be checked.
|
||||
* @returns A boolean indicating whether the file exists or not.
|
||||
*/
|
||||
async exists(blobName: string): Promise<boolean> {
|
||||
const fullKey = this.getFullKey(blobName);
|
||||
|
||||
try {
|
||||
const blobClient = this.containerClient.getBlobClient(fullKey);
|
||||
const exists = await blobClient.exists();
|
||||
this.logger.verbose(`ABS exists: file '${fullKey}' ${!exists ? 'not' : ''} found`);
|
||||
return exists;
|
||||
} catch (error) {
|
||||
this.logger.warn(`ABS exists: file ${fullKey} not found`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves an object to the ABS container.
|
||||
*
|
||||
* @param blobName - The name of the blob to be saved.
|
||||
* @param body - The object to be saved as a blob.
|
||||
* @returns A promise that resolves to the result of the upload operation.
|
||||
*/
|
||||
async saveObject(blobName: string, body: Record<string, unknown>): Promise<BlockBlobUploadResponse> {
|
||||
const fullKey = this.getFullKey(blobName);
|
||||
|
||||
try {
|
||||
const blockBlob: BlockBlobClient = this.containerClient.getBlockBlobClient(fullKey);
|
||||
const data = JSON.stringify(body);
|
||||
const result = await blockBlob.upload(data, Buffer.byteLength(data));
|
||||
this.logger.verbose(`ABS saveObject: successfully saved object '${fullKey}'`);
|
||||
return result;
|
||||
} catch (error: any) {
|
||||
this.logger.error(`ABS saveObject: error saving object '${fullKey}': ${error}`);
|
||||
|
||||
if (error.code === 'ECONNREFUSED') {
|
||||
throw errorAzureNotAvailable(error);
|
||||
}
|
||||
|
||||
throw internalError('saving object to ABS');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes multiple objects from the ABS container.
|
||||
*
|
||||
* @param keys - An array of blob names to be deleted.
|
||||
* @returns A promise that resolves when all blobs are deleted.
|
||||
*/
|
||||
async deleteObjects(keys: string[]): Promise<void> {
|
||||
try {
|
||||
this.logger.verbose(`Azure deleteObjects: attempting to delete ${keys.length} blobs`);
|
||||
const deletePromises = keys.map((key) => this.deleteObject(this.getFullKey(key)));
|
||||
await Promise.all(deletePromises);
|
||||
this.logger.verbose(`Successfully deleted objects: [${keys.join(', ')}]`);
|
||||
this.logger.info(`Successfully deleted ${keys.length} objects`);
|
||||
} catch (error) {
|
||||
this.logger.error(`Azure deleteObjects: error deleting objects: ${error}`);
|
||||
throw internalError('deleting objects from ABS');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a blob object from the ABS container.
|
||||
*
|
||||
* @param blobName - The name of the object to delete.
|
||||
*/
|
||||
protected async deleteObject(blobName: string): Promise<void> {
|
||||
try {
|
||||
const blobClient = this.containerClient.getBlobClient(blobName);
|
||||
const exists = await blobClient.exists();
|
||||
|
||||
if (!exists) {
|
||||
throw new Error(`Blob '${blobName}' does not exist`);
|
||||
}
|
||||
|
||||
await blobClient.delete();
|
||||
} catch (error) {
|
||||
this.logger.error(`Azure deleteObject: error deleting blob '${blobName}': ${error}`);
|
||||
throw error;
|
||||
}
|
||||
}

    /**
     * Lists objects in the ABS container with a specific prefix.
     *
     * @param additionalPrefix - Additional prefix relative to the subcontainer.
     * @param maxResults - Maximum number of objects to return. Defaults to 50.
     * @param continuationToken - Token to retrieve the next page of results.
     * @returns An object containing the list of blobs, continuation token and truncation status.
     */
    async listObjectsPaginated(
        additionalPrefix = '',
        maxResults = 50,
        continuationToken?: string
    ): Promise<{
        items: BlobItem[];
        continuationToken?: string;
        isTruncated?: boolean;
    }> {
        const basePrefix = this.getFullKey(additionalPrefix);
        this.logger.verbose(`ABS listObjectsPaginated: listing objects with prefix '${basePrefix}'`);

        try {
            maxResults = Number(maxResults);
            const iterator = this.containerClient.listBlobsFlat({ prefix: basePrefix }).byPage({
                maxPageSize: maxResults,
                continuationToken:
                    continuationToken && continuationToken !== 'undefined' ? continuationToken : undefined
            });

            const response = await iterator.next();
            const segment = response.value;

            let NextContinuationToken =
                segment.continuationToken === ''
                    ? undefined
                    : segment.continuationToken === continuationToken
                      ? undefined
                      : segment.continuationToken;
            let isTruncated = NextContinuationToken !== undefined;

            // Peek at the next page: if it has no items, clear the token and report the listing as not truncated.
            const iterator2 = this.containerClient
                .listBlobsFlat({ prefix: basePrefix })
                .byPage({ maxPageSize: maxResults, continuationToken: NextContinuationToken });

            const response2 = await iterator2.next();
            const segment2 = response2.value;

            if (segment2.segment.blobItems.length === 0) {
                NextContinuationToken = undefined;
                isTruncated = false;
            }

            return {
                items: segment.segment.blobItems,
                continuationToken: NextContinuationToken,
                isTruncated: isTruncated
            };
        } catch (error) {
            this.logger.error(`ABS listObjectsPaginated: error listing objects with prefix '${basePrefix}': ${error}`);
            throw internalError('listing objects from ABS');
        }
    }
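    // Illustrative pagination loop (not part of the committed file): walk every blob under a
    // hypothetical '.metadata/' prefix by feeding the returned continuation token back in.
    //
    //   let token: string | undefined;
    //   do {
    //       const page = await absStorage.listObjectsPaginated('.metadata/', 50, token);
    //       page.items.forEach((item) => console.log(item.name));
    //       token = page.isTruncated ? page.continuationToken : undefined;
    //   } while (token);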

    async getObjectAsJson(blobName: string): Promise<object | undefined> {
        try {
            const fullKey = this.getFullKey(blobName);
            const blobClient = this.containerClient.getBlobClient(fullKey);
            const exists = await blobClient.exists();

            if (!exists) {
                this.logger.warn(`ABS getObjectAsJson: object '${fullKey}' does not exist`);
                return undefined;
            }

            const downloadResp = await blobClient.download();
            const downloaded = await this.streamToString(downloadResp.readableStreamBody!);
            const parsed = JSON.parse(downloaded);
            this.logger.verbose(`ABS getObjectAsJson: successfully retrieved and parsed object '${fullKey}'`);
            return parsed;
        } catch (error: any) {
            this.logger.error(`ABS getObjectAsJson: error retrieving object '${blobName}': ${error}`);

            if (error.code === 'ECONNREFUSED') {
                throw errorAzureNotAvailable(error);
            }

            throw internalError('getting object as JSON from ABS');
        }
    }

    async getObjectAsStream(blobName: string, range?: { start: number; end: number }): Promise<Readable> {
        try {
            const fullKey = this.getFullKey(blobName);
            const blobClient = this.containerClient.getBlobClient(fullKey);

            const offset = range ? range.start : 0;
            const count = range ? (range.start === 0 && range.end === 0 ? 1 : range.end - range.start + 1) : undefined;

            const downloadResp = await blobClient.download(offset, count);

            if (!downloadResp.readableStreamBody) {
                throw new Error('No readable stream body found in the download response');
            }

            this.logger.info(`ABS getObjectAsStream: successfully retrieved object '${fullKey}' as stream`);
            return downloadResp.readableStreamBody as Readable;
        } catch (error: any) {
            this.logger.error(`ABS getObjectAsStream: error retrieving stream for object '${blobName}': ${error}`);

            if (error.code === 'ECONNREFUSED') {
                throw errorAzureNotAvailable(error);
            }

            throw internalError('getting object as stream from ABS');
        }
    }
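    // Illustrative range mapping (not part of the committed file): the inclusive byte range
    // { start, end } is translated into Azure's offset/count pair, so a hypothetical request for
    // the first kilobyte downloads offset 0 with count 1024, while the special case
    // { start: 0, end: 0 } maps to a single byte (count 1).
    //
    //   const stream = await absStorage.getObjectAsStream('room-abc.mp4', { start: 0, end: 1023 });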

    /**
     * Gets the properties (headers/metadata) of a blob object.
     *
     * @param blobName - The name of the blob.
     * @returns The properties of the blob.
     */
    async getObjectHeaders(blobName: string): Promise<{
        ContentType?: string;
        ContentLength?: number;
        LastModified?: Date;
        Etag?: string;
        Metadata?: Record<string, string>;
    }> {
        try {
            const fullKey = this.getFullKey(blobName);
            const blobClient = this.containerClient.getBlobClient(fullKey);
            this.logger.verbose(`ABS getObjectHeaders: requesting headers for object '${fullKey}'`);
            const properties = await blobClient.getProperties();
            // Return only the relevant header/metadata fields
            return {
                ContentType: properties.contentType,
                ContentLength: properties.contentLength,
                LastModified: properties.lastModified,
                Etag: properties.etag,
                Metadata: properties.metadata
            };
        } catch (error) {
            this.logger.error(`ABS getObjectHeaders: error retrieving headers for object '${blobName}': ${error}`);
            throw internalError('getting object headers from ABS');
        }
    }

    protected async streamToString(readable: NodeJS.ReadableStream): Promise<string> {
        return new Promise((resolve, reject) => {
            const chunks: Buffer[] = [];
            readable.on('data', (data) => chunks.push(Buffer.isBuffer(data) ? data : Buffer.from(data)));
            readable.on('end', () => resolve(Buffer.concat(chunks).toString('utf-8')));
            readable.on('error', reject);
        });
    }

    protected getFullKey(name: string): string {
        const prefix = `${MEET_AZURE_SUBCONATAINER_NAME}`;

        if (name.startsWith(prefix)) {
            return name;
        }

        return `${prefix}/${name}`;
    }
}
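// Key layout sketch (illustrative, not part of the committed file): getFullKey namespaces every
// blob under the configured subcontainer prefix, mirroring the S3 subbucket behaviour shown in the
// hunks below. Assuming a MEET_AZURE_SUBCONATAINER_NAME of 'openvidu-meet':
//
//   getFullKey('.metadata/room-abc.json')               -> 'openvidu-meet/.metadata/room-abc.json'
//   getFullKey('openvidu-meet/.metadata/room-abc.json') -> returned unchanged (already prefixed)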

@@ -114,7 +114,7 @@ export class S3StorageProvider implements StorageProvider {
     async getObjectHeaders(key: string): Promise<{ contentLength?: number; contentType?: string }> {
         try {
             this.logger.debug(`Getting object headers from S3: ${key}`);
-            const data = await this.s3Service.getHeaderObject(key);
+            const data = await this.s3Service.getObjectHeaders(key);
             return {
                 contentLength: data.ContentLength,
                 contentType: data.ContentType

@@ -14,6 +14,7 @@ import {
 } from '@aws-sdk/client-s3';
 import { inject, injectable } from 'inversify';
 import { Readable } from 'stream';
+import INTERNAL_CONFIG from '../../../../config/internal-config.js';
 import {
     MEET_AWS_REGION,
     MEET_S3_ACCESS_KEY,

@@ -25,7 +26,6 @@ import {
 } from '../../../../environment.js';
 import { errorS3NotAvailable, internalError } from '../../../../models/error.model.js';
 import { LoggerService } from '../../../index.js';
-import INTERNAL_CONFIG from '../../../../config/internal-config.js';

 @injectable()
 export class S3Service {

@@ -51,11 +51,11 @@ export class S3Service {
      */
     async exists(name: string, bucket: string = MEET_S3_BUCKET): Promise<boolean> {
         try {
-            await this.getHeaderObject(name, bucket);
-            this.logger.verbose(`S3 exists: file ${this.getFullKey(name)} found in bucket ${bucket}`);
+            await this.getObjectHeaders(name, bucket);
+            this.logger.verbose(`S3 exists: file '${this.getFullKey(name)}' found in bucket '${bucket}'`);
             return true;
         } catch (error) {
-            this.logger.warn(`S3 exists: file ${this.getFullKey(name)} not found in bucket ${bucket}`);
+            this.logger.warn(`S3 exists: file '${this.getFullKey(name)}' not found in bucket '${bucket}'`);
             return false;
         }
     }

@@ -78,12 +78,12 @@ export class S3Service {
                 Body: JSON.stringify(body)
             });
             const result = await this.retryOperation<PutObjectCommandOutput>(() => this.run(command));
-            this.logger.verbose(`S3: successfully saved object '${fullKey}' in bucket '${bucket}'`);
+            this.logger.verbose(`S3 saveObject: successfully saved object '${fullKey}' in bucket '${bucket}'`);
             return result;
-        } catch (error: unknown) {
-            this.logger.error(`S3: error saving object '${fullKey}' in bucket '${bucket}': ${error}`);
+        } catch (error: any) {
+            this.logger.error(`S3 saveObject: error saving object '${fullKey}' in bucket '${bucket}': ${error}`);

-            if (error && typeof error === 'object' && 'code' in error && error.code === 'ECONNREFUSED') {
+            if (error.code === 'ECONNREFUSED') {
                 throw errorS3NotAvailable(error);
             }

@@ -93,12 +93,14 @@ export class S3Service {

     /**
      * Bulk deletes objects from S3.
-     * @param keys Array of object keys to delete. Estos keys deben incluir el subbucket (se obtiene con getFullKey).
+     * @param keys Array of object keys to delete
      * @param bucket S3 bucket name (default: MEET_S3_BUCKET)
      */
     async deleteObjects(keys: string[], bucket: string = MEET_S3_BUCKET): Promise<DeleteObjectsCommandOutput> {
         try {
-            this.logger.verbose(`S3 delete: attempting to delete ${keys.length} objects from bucket ${bucket}`);
+            this.logger.verbose(
+                `S3 deleteObjects: attempting to delete ${keys.length} objects from bucket '${bucket}'`
+            );
             const command = new DeleteObjectsCommand({
                 Bucket: bucket,
                 Delete: {

@@ -108,10 +110,10 @@ export class S3Service {
             });
             const result = await this.run(command);
             this.logger.verbose(`Successfully deleted objects: [${keys.join(', ')}]`);
-            this.logger.info(`Successfully deleted ${keys.length} objects from bucket ${bucket}`);
+            this.logger.info(`Successfully deleted ${keys.length} objects from bucket '${bucket}'`);
             return result;
-        } catch (error: any) {
-            this.logger.error(`S3 bulk delete: error deleting objects in bucket ${bucket}: ${error}`);
+        } catch (error) {
+            this.logger.error(`S3 deleteObjects: error deleting objects in bucket '${bucket}': ${error}`);
             throw internalError('deleting objects from S3');
         }
     }

@@ -120,11 +122,9 @@ export class S3Service {
      * List objects with pagination.
      *
      * @param additionalPrefix Additional prefix relative to the subbucket.
-     * Por ejemplo, para listar metadata se pasa ".metadata/".
-     * @param searchPattern Optional regex pattern to filter keys.
-     * @param bucket Optional bucket name.
-     * @param maxKeys Maximum number of objects to return.
+     * @param maxKeys Maximum number of objects to return. Defaults to 50.
      * @param continuationToken Token to retrieve the next page.
+     * @param bucket Optional bucket name. Defaults to MEET_S3_BUCKET.
      *
      * @returns The ListObjectsV2CommandOutput with Keys and NextContinuationToken.
      */

@@ -138,7 +138,7 @@ export class S3Service {
         // Example: if s3Subbucket is "recordings" and additionalPrefix is ".metadata/",
         // it will list objects with keys that start with "recordings/.metadata/".
         const basePrefix = this.getFullKey(additionalPrefix);
-        this.logger.verbose(`S3 listObjectsPaginated: listing objects with prefix "${basePrefix}"`);
+        this.logger.verbose(`S3 listObjectsPaginated: listing objects with prefix '${basePrefix}'`);

         const command = new ListObjectsV2Command({
             Bucket: bucket,

@@ -149,13 +149,13 @@ export class S3Service {

         try {
             return await this.s3.send(command);
-        } catch (error: any) {
-            this.logger.error(`S3 listObjectsPaginated: error listing objects with prefix "${basePrefix}": ${error}`);
+        } catch (error) {
+            this.logger.error(`S3 listObjectsPaginated: error listing objects with prefix '${basePrefix}': ${error}`);
             throw internalError('listing objects from S3');
         }
     }

-    async getObjectAsJson(name: string, bucket: string = MEET_S3_BUCKET): Promise<Object | undefined> {
+    async getObjectAsJson(name: string, bucket: string = MEET_S3_BUCKET): Promise<object | undefined> {
         try {
             const obj = await this.getObject(name, bucket);
             const str = await obj.Body?.transformToString();

@@ -174,7 +174,9 @@ export class S3Service {
                 throw errorS3NotAvailable(error);
             }

-            this.logger.error(`S3 getObjectAsJson: error retrieving object ${name} from bucket ${bucket}: ${error}`);
+            this.logger.error(
+                `S3 getObjectAsJson: error retrieving object '${name}' from bucket '${bucket}': ${error}`
+            );
             throw internalError('getting object as JSON from S3');
         }
     }

@@ -187,18 +189,17 @@ export class S3Service {
         try {
             const obj = await this.getObject(name, bucket, range);

-            if (obj.Body) {
-                this.logger.info(
-                    `S3 getObjectAsStream: successfully retrieved object ${name} stream from bucket ${bucket}`
-                );
-
-                return obj.Body as Readable;
-            } else {
+            if (!obj.Body) {
                 throw new Error('Empty body response');
             }
+
+            this.logger.info(
+                `S3 getObjectAsStream: successfully retrieved object '${name}' as stream from bucket '${bucket}'`
+            );
+            return obj.Body as Readable;
         } catch (error: any) {
             this.logger.error(
-                `S3 getObjectAsStream: error retrieving stream for object ${name} from bucket ${bucket}: ${error}`
+                `S3 getObjectAsStream: error retrieving stream for object '${name}' from bucket '${bucket}': ${error}`
             );

             if (error.code === 'ECONNREFUSED') {

@@ -209,21 +210,21 @@ export class S3Service {
         }
     }

-    async getHeaderObject(name: string, bucket: string = MEET_S3_BUCKET): Promise<HeadObjectCommandOutput> {
+    async getObjectHeaders(name: string, bucket: string = MEET_S3_BUCKET): Promise<HeadObjectCommandOutput> {
         try {
             const fullKey = this.getFullKey(name);
             const headParams: HeadObjectCommand = new HeadObjectCommand({
                 Bucket: bucket,
                 Key: fullKey
             });
-            this.logger.verbose(`S3 getHeaderObject: requesting header for object ${fullKey} in bucket ${bucket}`);
+            this.logger.verbose(`S3 getHeaderObject: requesting headers for object '${fullKey}' in bucket '${bucket}'`);
             return await this.run(headParams);
         } catch (error) {
             this.logger.error(
-                `S3 getHeaderObject: error getting header for object ${this.getFullKey(name)} in bucket ${bucket}: ${error}`
+                `S3 getHeaderObject: error retrieving headers for object '${this.getFullKey(name)}' in bucket '${bucket}': ${error}`
             );

-            throw internalError('getting header for object from S3');
+            throw internalError('getting object headers from S3');
         }
     }

@@ -259,7 +260,7 @@ export class S3Service {
             Key: fullKey,
             Range: range ? `bytes=${range.start}-${range.end}` : undefined
         });
-        this.logger.verbose(`S3 getObject: requesting object ${fullKey} from bucket ${bucket}`);
+        this.logger.verbose(`S3 getObject: requesting object '${fullKey}' from bucket '${bucket}'`);

         return await this.run(command);
     }

@@ -276,7 +277,7 @@ export class S3Service {
         let delayMs = Number(INTERNAL_CONFIG.S3_INITIAL_RETRY_DELAY_MS);
         const maxRetries = Number(INTERNAL_CONFIG.S3_MAX_RETRIES_ATTEMPTS_ON_SAVE_ERROR);

-        while (true) {
+        while (attempt < maxRetries) {
             try {
                 this.logger.verbose(`S3 operation: attempt ${attempt + 1}`);
                 return await operation();

@@ -294,6 +295,8 @@ export class S3Service {
                 delayMs *= 2;
             }
         }
+
+        throw new Error('S3 retryOperation: exceeded maximum retry attempts without success');
     }
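    // Illustrative backoff arithmetic (not part of the committed diff): delayMs doubles after each
    // failed attempt, so with hypothetical values of S3_INITIAL_RETRY_DELAY_MS = 500 and
    // S3_MAX_RETRIES_ATTEMPTS_ON_SAVE_ERROR = 4, a persistently failing save would wait roughly
    // 500 ms, 1000 ms and 2000 ms between attempts before the final throw above is reached.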

     /**